From 05020e232a7ad4e980828e5852d4ef68f9fd0b83 Mon Sep 17 00:00:00 2001 From: eurodatacube-submissions <61697821+eurodatacube-submissions@users.noreply.github.com> Date: Tue, 31 Jan 2023 19:04:53 +0100 Subject: [PATCH 1/3] Add notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb for pull request submission --- notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb | 1 + 1 file changed, 1 insertion(+) create mode 100644 notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb diff --git a/notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb b/notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb new file mode 100644 index 00000000..bd4041fc --- /dev/null +++ b/notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "metadata": {}, "source": ["# Vessel Density based on Copernicus Sentinel-1\n", "---\n", "_Author: Alessandro Cimbelli, Oct 2022_\n", "\n", "In this updated proposal, the marine area of interest is divided and analysed into areas of 100 km x 100 km\n", "#### Methodology \n", "1. Definition of the boxes ID to be analysed.\n", "2. Extraction of the footprints of the Sentinel-1 (VV band) images acquired in the time period (one month).\n", "3. Extraction of vessel centroids in each image\n", "4. Map of vessel density for each 1-km grid tile of the selected area\n", "5. 
Production of geojson and csv files\n"]}, {"cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": ["year = 2020\n", "months = [1,2,3,4,5,6,7,8,9,10,11,12]"]}, {"cell_type": "code", "execution_count": 2, "metadata": {"tags": []}, "outputs": [], "source": ["#from edc import check_compatibility\n", "#check_compatibility(\"user-0.24.5\")"]}, {"cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": ["#from edc import setup_environment_variables\n", "#setup_environment_variables()"]}, {"cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": ["from xcube_sh.config import CubeConfig\n", "from xcube_sh.cube import open_cube\n", "from xcube_sh.sentinelhub import SentinelHub\n", "from xcube.core.geom import (\n", " clip_dataset_by_geometry, \n", " mask_dataset_by_geometry,\n", " convert_geometry,\n", " rasterize_features\n", ")\n", "import xarray as xr\n", "import numpy as np\n", "import shapely.geometry\n", "import IPython.display\n", "import calendar\n", "\n", "import datetime\n", "import os, sys\n", "%matplotlib inline\n", "import matplotlib.pyplot as plt\n", "import rasterio, rioxarray\n", "import geopandas as gpd\n", "import pandas as pd\n", "import warnings\n", "import scipy\n", "import pyproj\n", "from shapely.ops import transform, cascaded_union, unary_union\n", "from shapely.geometry import shape, Point, Polygon, LineString, MultiPoint, MultiPolygon, mapping \n", "from rasterio.features import sieve, shapes\n", "from rasterio.plot import show, plotting_extent\n", "from rasterio.enums import Resampling\n", "import json\n", "from oauthlib.oauth2 import BackendApplicationClient\n", "from requests_oauthlib import OAuth2Session\n", "import requests\n", "import csv\n", "import shutil"]}, {"cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": ["# Your client credentials\n", "client_id = os.environ['SH_CLIENT_ID']\n", "client_secret = 
os.environ['SH_CLIENT_SECRET']\n", "\n", "grid_name = \"grid100k_geo.geojson\"\n", "grid100k = gpd.read_file(grid_name)\n", "\n", "res1 = 0.0054 # 0.00054 # lower resolution for satellite footprints\n", "res2 = 0.00018 # Higher resolution for vessel shape and number\n", "#dres = (res1-res2)/2\n", "###############################\n", "# csv parameters\n", "eo_sensor = 'Sentinel-1'\n", "indicator_code = 'E1b'\n", "y_axis = 'Vessel density'\n", "indicator_name = 'Vessel density'\n", "data_provider = \"cimbelli\"\n", "description = 'Number of vessels in the AOI'\n", "method = 'Object detection of vessels from Sentinel-1 images'\n", "update_freq = 'Monthly'"]}, {"cell_type": "code", "execution_count": 6, "metadata": {"tags": []}, "outputs": [], "source": ["# This function extracts the centroids of all the vessels in an image\n", "def vesselShape(imgtif):\n", "\n", " img1tmp = imgtif.astype(rasterio.uint8)\n", " img2tmp = rasterio.features.sieve(img1tmp.to_numpy(), 15, connectivity=8)\n", " \n", " with rasterio.Env():\n", " image = img2tmp #img_vessels.read(1) # first band\n", " results = (\n", " {'properties': {'raster_val': v}, 'geometry': s}\n", " for i, (s, v) \n", " in enumerate(\n", " shapes(image, mask=None, transform=imgtif.rio.transform())))\n", " geoms = list(results)\n", " npoly = len(geoms)\n", " npoly_1 = 0\n", " \n", "# for each vessel get shape information\n", " \n", " ppc = MultiPoint([])\n", " if npoly > 0:\n", " for p in range(npoly):\n", " val = int(geoms[p]['properties']['raster_val'])\n", "\n", " if val==1:\n", " npoly_1 += 1\n", " pp = shape(geoms[p]['geometry'])\n", " pp_centroids = pp.centroid #.coords \n", " ppc = ppc.union(pp_centroids) \n", " return ppc, npoly_1\n", " else:\n", " return None, 0\n", " "]}, {"cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [], "source": ["def sentinelInfo(data, box, n):\n", "\n", " # Create a session\n", " client = BackendApplicationClient(client_id=client_id)\n", " oauth = 
OAuth2Session(client=client)\n", "\n", " # Get token for the session\n", " token = oauth.fetch_token(token_url='https://services.sentinel-hub.com/oauth/token', client_secret=client_secret)\n", "\n", " # All requests using this session will have an access token automatically added\n", " resp = oauth.get(\"https://services.sentinel-hub.com/oauth/tokeninfo\") \n", " token1 = token['access_token']\n", " \n", " data = '{\"collections\":[\"sentinel-1-grd\"], \"datetime\":\"' + data + '\",\"bbox\": [' + box + '] ,\"limit\": ' + str(n) + '}' # 2020-01-01T00:00:00Z/2020-01-31T23:59:59Z - 10,54.27,11,54.6\n", " headers = {\"Content-Type\": \"application/json\" , \"Authorization\": \"Bearer \" + token1 }\n", " endpoint = \"https://services.sentinel-hub.com/api/v1/catalog/search\"\n", " \n", " ggg = requests.post(endpoint, data = data, headers=headers)\n", " \n", " res = json.loads(ggg.content.decode('UTF-8'))\n", " id = res['features'][0]['id']\n", " sat = id[2:3]\n", " \n", " ddatetime = res['features'][0]['properties']['datetime'][:-1]\n", " \n", " return sat, ddatetime \n", " "]}, {"cell_type": "markdown", "metadata": {}, "source": ["### Definition of the boxes ID to be analysed. 
Please edit them in the list below the map.\n", "\n", "_The mask of all European water areas has been produced from the coastline shapefile available at the page:_\n", "\n", "https://www.eea.europa.eu/data-and-maps/data/eea-coastline-for-analysis-1/gis-data/europe-coastline-shapefile\n"]}, {"cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [{"data": {"text/html": ["\n"], "text/plain": [""]}, "metadata": {}, "output_type": "display_data"}], "source": ["%%html\n", ""]}, {"cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [{"data": {"application/vnd.jupyter.widget-view+json": {"model_id": "1d9edb3e66bc433ca48105d2300abb11", "version_major": 2, "version_minor": 0}, "text/plain": ["Map(center=[49.0, 9.0], controls=(ZoomControl(options=['position', 'zoom_in_text', 'zoom_in_title', 'zoom_out_\u2026"]}, "execution_count": 9, "metadata": {}, "output_type": "execute_result"}], "source": ["with open('grid100k_geo.geojson', 'r') as f:\n", " data = json.load(f)\n", "\n", "from ipyleaflet import Map, basemaps, basemap_to_tiles, GeoJSON, Marker, DivIcon, WidgetControl\n", "m = Map(\n", " basemap = basemap_to_tiles(basemaps.OpenStreetMap.Mapnik),\n", " center = (49.0, 9.0),\n", " zoom = 5\n", " )\n", "geo_json = GeoJSON(\n", " data = data,\n", " style = {'opacity':1, 'fillOpacity':0.1, 'weight':1, 'fillColor':'green'},\n", " hover_style = {'color':'blue', 'fillOpacity':0.5, 'weight':1.2}\n", " )\n", "for i in range(0,len(grid100k)):\n", " extx = grid100k.iloc[i][\"geometry\"].bounds[2]-grid100k.iloc[i][\"geometry\"].bounds[0]\n", " lo = grid100k.iloc[i][\"geometry\"].bounds[0] + extx*0.25 #grid100k.iloc[i][\"geometry\"].centroid.x\n", " la = grid100k.iloc[i][\"geometry\"].centroid.y\n", " marker = Marker(\n", " location = [la, lo], \n", " icon = DivIcon(html=f\"\"\"
{grid100k.iloc[i]['Aoi_Id']}
\"\"\",),\n", " draggable=False\n", " )\n", " m.add_layer(marker)\n", "m.add_layer(geo_json)\n", "m.layout.height = '800px'\n", "m"]}, {"cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [], "source": ["# TILE LIST\n", "\n", "# 'BE1','DE1','DE2','DK2','EE1','GR7',\n", "# 'ES1','ES18','ES3','ES4','ES5','FR2',\n", "# 'FR3','FR4','IE2','IT3','IT16','IT17',\n", "# 'IT18','IT19','LT1','LV1','NL3','NL4',\n", "# 'NO3','NO4','PL1','PT2','RO1','RU1',\n", "# 'TR1','TR2'\n", "\n", "bbox_list = ('NL3',)\n", "\n"]}, {"cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [{"ename": "SentinelHubError", "evalue": "403 Client Error: Forbidden for url: https://services.sentinel-hub.com/api/v1/catalog/search", "output_type": "error", "traceback": ["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mHTTPError\u001b[0m Traceback (most recent call last)", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/sentinelhub.py:645\u001b[0m, in \u001b[0;36mSentinelHubError.maybe_raise_for_response\u001b[0;34m(cls, response)\u001b[0m\n\u001b[1;32m 644\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 645\u001b[0m \u001b[43mresponse\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mraise_for_status\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 646\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m requests\u001b[38;5;241m.\u001b[39mHTTPError \u001b[38;5;28;01mas\u001b[39;00m e:\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/requests/models.py:1021\u001b[0m, in \u001b[0;36mResponse.raise_for_status\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1020\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m http_error_msg:\n\u001b[0;32m-> 1021\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m HTTPError(http_error_msg, response\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m)\n", 
"\u001b[0;31mHTTPError\u001b[0m: 403 Client Error: Forbidden for url: https://services.sentinel-hub.com/api/v1/catalog/search", "\nThe above exception was the direct cause of the following exception:\n", "\u001b[0;31mSentinelHubError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn [11], line 56\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[38;5;66;03m# ---------------------------------------------\u001b[39;00m\n\u001b[1;32m 48\u001b[0m config_s1 \u001b[38;5;241m=\u001b[39m CubeConfig(dataset_name \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mS1GRD\u001b[39m\u001b[38;5;124m'\u001b[39m,\n\u001b[1;32m 49\u001b[0m band_names \u001b[38;5;241m=\u001b[39m [\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mVV\u001b[39m\u001b[38;5;124m'\u001b[39m],\n\u001b[1;32m 50\u001b[0m tile_size \u001b[38;5;241m=\u001b[39m [\u001b[38;5;241m512\u001b[39m, \u001b[38;5;241m512\u001b[39m],\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 54\u001b[0m time_range \u001b[38;5;241m=\u001b[39m time_interval, \n\u001b[1;32m 55\u001b[0m ) \n\u001b[0;32m---> 56\u001b[0m s1 \u001b[38;5;241m=\u001b[39m \u001b[43mopen_cube\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconfig_s1\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 57\u001b[0m n_img \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlen\u001b[39m(s1\u001b[38;5;241m.\u001b[39mVV\u001b[38;5;241m.\u001b[39mtime)\n\u001b[1;32m 58\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124maoi\u001b[39m\u001b[38;5;124m\"\u001b[39m, bbox_list[b], \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m - \u001b[39m\u001b[38;5;124m\"\u001b[39m,n_img, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mimages in the month of\u001b[39m\u001b[38;5;124m'\u001b[39m, month, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m-\u001b[39m\u001b[38;5;124m'\u001b[39m,year)\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/cube.py:63\u001b[0m, in \u001b[0;36mopen_cube\u001b[0;34m(cube_config, observer, 
trace_store_calls, max_cache_size, sentinel_hub, **sh_kwargs)\u001b[0m\n\u001b[1;32m 60\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m sh_kwargs:\n\u001b[1;32m 61\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124munexpected keyword-arguments:\u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m 62\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(sh_kwargs\u001b[38;5;241m.\u001b[39mkeys())\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m---> 63\u001b[0m cube_store \u001b[38;5;241m=\u001b[39m \u001b[43mSentinelHubChunkStore\u001b[49m\u001b[43m(\u001b[49m\u001b[43msentinel_hub\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 64\u001b[0m \u001b[43m \u001b[49m\u001b[43mcube_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 65\u001b[0m \u001b[43m \u001b[49m\u001b[43mobserver\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mobserver\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 66\u001b[0m \u001b[43m \u001b[49m\u001b[43mtrace_store_calls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtrace_store_calls\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 67\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m max_cache_size:\n\u001b[1;32m 68\u001b[0m cube_store \u001b[38;5;241m=\u001b[39m zarr\u001b[38;5;241m.\u001b[39mLRUStoreCache(cube_store, max_cache_size)\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/chunkstore.py:570\u001b[0m, in \u001b[0;36mSentinelHubChunkStore.__init__\u001b[0;34m(self, sentinel_hub, cube_config, observer, trace_store_calls)\u001b[0m\n\u001b[1;32m 568\u001b[0m d[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mband_sample_types\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m 
band_sample_types\n\u001b[1;32m 569\u001b[0m cube_config \u001b[38;5;241m=\u001b[39m CubeConfig\u001b[38;5;241m.\u001b[39mfrom_dict(d)\n\u001b[0;32m--> 570\u001b[0m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;21;43m__init__\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mcube_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 571\u001b[0m \u001b[43m \u001b[49m\u001b[43mobserver\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mobserver\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 572\u001b[0m \u001b[43m \u001b[49m\u001b[43mtrace_store_calls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtrace_store_calls\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/chunkstore.py:92\u001b[0m, in \u001b[0;36mRemoteStore.__init__\u001b[0;34m(self, cube_config, observer, trace_store_calls)\u001b[0m\n\u001b[1;32m 90\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_observers \u001b[38;5;241m=\u001b[39m [observer] \u001b[38;5;28;01mif\u001b[39;00m observer \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01melse\u001b[39;00m []\n\u001b[1;32m 91\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_trace_store_calls \u001b[38;5;241m=\u001b[39m trace_store_calls\n\u001b[0;32m---> 92\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_time_ranges \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_time_ranges\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 94\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_time_ranges:\n\u001b[1;32m 95\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m 
\u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mCould not determine any valid time stamps\u001b[39m\u001b[38;5;124m'\u001b[39m)\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/chunkstore.py:593\u001b[0m, in \u001b[0;36mSentinelHubChunkStore.get_time_ranges\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 589\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcannot find collection\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 590\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m name for dataset name\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 591\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_cube_config\u001b[38;5;241m.\u001b[39mdataset_name\u001b[38;5;132;01m!r}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 592\u001b[0m datetime_format \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mY-\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mm-\u001b[39m\u001b[38;5;132;01m%d\u001b[39;00m\u001b[38;5;124mT\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mH:\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mM:\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mSZ\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m--> 593\u001b[0m features \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_sentinel_hub\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_features\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 594\u001b[0m \u001b[43m \u001b[49m\u001b[43mcollection_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcollection_name\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 
595\u001b[0m \u001b[43m \u001b[49m\u001b[43mbbox\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_cube_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbbox\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 596\u001b[0m \u001b[43m \u001b[49m\u001b[43mcrs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_cube_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcrs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 597\u001b[0m \u001b[43m \u001b[49m\u001b[43mtime_range\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m(\u001b[49m\n\u001b[1;32m 598\u001b[0m \u001b[43m \u001b[49m\u001b[43mtime_start\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstrftime\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdatetime_format\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 599\u001b[0m \u001b[43m \u001b[49m\u001b[43mtime_end\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstrftime\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdatetime_format\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 600\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 601\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 602\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m features:\n\u001b[1;32m 603\u001b[0m \u001b[38;5;66;03m# Maybe the dataset has no data in given time_range\u001b[39;00m\n\u001b[1;32m 604\u001b[0m \u001b[38;5;66;03m# or the dataset has no associated time information\u001b[39;00m\n\u001b[1;32m 605\u001b[0m \u001b[38;5;66;03m# such as some BYOC or DEM.\u001b[39;00m\n\u001b[1;32m 606\u001b[0m \u001b[38;5;66;03m# Repeat query without time_range.\u001b[39;00m\n\u001b[1;32m 607\u001b[0m features \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sentinel_hub\u001b[38;5;241m.\u001b[39mget_features(\n\u001b[1;32m 608\u001b[0m 
collection_name\u001b[38;5;241m=\u001b[39mcollection_name,\n\u001b[1;32m 609\u001b[0m bbox\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_cube_config\u001b[38;5;241m.\u001b[39mbbox,\n\u001b[1;32m 610\u001b[0m crs\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_cube_config\u001b[38;5;241m.\u001b[39mcrs,\n\u001b[1;32m 611\u001b[0m )\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/sentinelhub.py:265\u001b[0m, in \u001b[0;36mSentinelHub.get_features\u001b[0;34m(self, collection_name, bbox, crs, time_range)\u001b[0m\n\u001b[1;32m 260\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m features_count \u001b[38;5;241m==\u001b[39m max_feature_count:\n\u001b[1;32m 261\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msession\u001b[38;5;241m.\u001b[39mpost(catalog_url,\n\u001b[1;32m 262\u001b[0m json\u001b[38;5;241m=\u001b[39mrequest,\n\u001b[1;32m 263\u001b[0m headers\u001b[38;5;241m=\u001b[39mheaders)\n\u001b[0;32m--> 265\u001b[0m \u001b[43mSentinelHubError\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmaybe_raise_for_response\u001b[49m\u001b[43m(\u001b[49m\u001b[43mresponse\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 267\u001b[0m feature_collection \u001b[38;5;241m=\u001b[39m json\u001b[38;5;241m.\u001b[39mloads(response\u001b[38;5;241m.\u001b[39mcontent)\n\u001b[1;32m 268\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m feature_collection\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtype\u001b[39m\u001b[38;5;124m'\u001b[39m) \u001b[38;5;241m!=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mFeatureCollection\u001b[39m\u001b[38;5;124m'\u001b[39m \\\n\u001b[1;32m 269\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m 
\u001b[38;5;28misinstance\u001b[39m(feature_collection\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mfeatures\u001b[39m\u001b[38;5;124m'\u001b[39m),\n\u001b[1;32m 270\u001b[0m \u001b[38;5;28mlist\u001b[39m):\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/sentinelhub.py:655\u001b[0m, in \u001b[0;36mSentinelHubError.maybe_raise_for_response\u001b[0;34m(cls, response)\u001b[0m\n\u001b[1;32m 653\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[1;32m 654\u001b[0m \u001b[38;5;28;01mpass\u001b[39;00m\n\u001b[0;32m--> 655\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m SentinelHubError(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00me\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mdetail\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m'\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m detail \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00me\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m'\u001b[39m,\n\u001b[1;32m 656\u001b[0m response\u001b[38;5;241m=\u001b[39mresponse) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01me\u001b[39;00m\n", "\u001b[0;31mSentinelHubError\u001b[0m: 403 Client Error: Forbidden for url: https://services.sentinel-hub.com/api/v1/catalog/search"]}], "source": ["######################################################\n", "# Density of vessels for each 1km cell\n", "######################################################\n", "\n", "warnings.filterwarnings('ignore')\n", "# generate csv\n", "\n", "gen_date = datetime.datetime.today().strftime('%Y%m%dT%H%M%S')\n", "csv_file = indicator_code + '_' + data_provider + '_' + gen_date + '.csv'\n", "header = ['AOI','Country','Region','City','Site Name','Description','Method','EO Sensor','Input Data', \n", " 'Indicator 
code','Time','Measurement Value','Reference Description','Reference time','Reference value','Rule', \n", " 'Indicator Value','Sub-AOI','Y axis','Indicator Name','Color code','Data Provider','AOI_ID','Update Frequency']\n", "\n", "if os.path.exists('out/' + csv_file):\n", " os.remove('out/' + csv_file) \n", "with open('out/' + csv_file, 'w', encoding='UTF8') as f:\n", " writer = csv.writer(f)\n", " writer.writerow(header)\n", "\n", "# start processing\n", "\n", " for b in range(len(bbox_list)):\n", " \n", " for month in months:\n", " \n", " \n", " d1 = datetime.datetime(year, month, 1,12,0,0)\n", " d2 = datetime.datetime(year, month, calendar.monthrange(year, month)[1],12,0,0)\n", " time_interval= d1.strftime(\"%Y-%m-%d\"),d2.strftime(\"%Y-%m-%d\")\n", " month = str(month) if (month)>9 else \"0\"+ str(month)\n", " \n", " arr_s1 = []\n", " poly = grid100k[grid100k.Aoi_Id == bbox_list[b]]\n", "\n", " # --------------------------\n", " # csv file\n", " id100 = poly.id_100k.values[0]\n", " aoi_center = str(round(poly.geometry.centroid.values[0].y,6)) + \", \" + str(round(poly.geometry.centroid.values[0].x,6))\n", " co = poly.Country.values[0]\n", " port = poly.Port.values[0]\n", " city = poly.City.values[0]\n", " bbox = str(round(poly.geometry.bounds.values[0][0],3)) + ',' + str(round(poly.geometry.bounds.values[0][1],3)) + ',' + str(round(poly.geometry.bounds.values[0][2],3)) + ',' + str(round(poly.geometry.bounds.values[0][3],3)) \n", "\n", " # ---------------------------------------------\n", " tmp_masks = []\n", " tmp_masks.clear() \n", " # ---------------------------------------------\n", " config_s1 = CubeConfig(dataset_name = 'S1GRD',\n", " band_names = ['VV'],\n", " tile_size = [512, 512],\n", " crs = \"http://www.opengis.net/def/crs/EPSG/0/4326\",\n", " spatial_res = res2, \n", " bbox = bbox,\n", " time_range = time_interval, \n", " ) \n", " s1 = open_cube(config_s1)\n", " n_img = len(s1.VV.time)\n", " print(\"aoi\", bbox_list[b], \" - \",n_img, 'images in the 
month of', month, '-',year)\n", " print(\" day\") \n", " # ---------------------------------------------\n", " points = MultiPoint([]) \n", " \n", " for j in range(n_img): \n", "\n", " pp1 = pd.DataFrame()\n", " img0 = s1.VV[j]\n", "\n", " # ---------------------------------------------\n", " # for footprints\n", " tmp_mask = img0.notnull() \n", " tmp_mask = tmp_mask.astype(rasterio.uint8)\n", " tmp_mask.rio.write_crs(\"EPSG:4326\", inplace=True)\n", "\n", " tmp_masks.append(tmp_mask)\n", "\n", " # ---------------------------------------------\n", " # define geojson filename\n", " daytime = str(s1.VV.time[j].values).replace(\"-\",\"\").replace(\":\",\"\")\n", " daytime = daytime[:daytime.find(\".\")]\n", " daytime1 = str(s1.VV.time[j].values).replace(\"-\",\"/\").replace(\"T\",\" \")\n", " daytime1 = daytime1[:daytime1.find(\".\")]\n", "\n", " day = daytime[6:11]\n", " hour = int(day[-2:])\n", " hour1 = str(hour) if (hour)>9 else \"0\"+ str(hour)\n", " print(\" \", day, end=\"..\")\n", "\n", "\n", " # mask image with land and fixed semarks (windfarms, ...)\n", " img1 = img0 > 0.7\n", " img2 = img1.astype(rasterio.uint8)\n", " img2.rio.write_crs(\"EPSG:4326\", inplace=True)\n", "\n", " land = gpd.read_file('sea_4326.geojson')\n", " clipped = img2.rio.clip(land.geometry, land.crs, drop=True, invert=False)\n", "\n", " # assess the number of vessels\n", " pp, nvess = vesselShape(clipped) \n", " print(nvess, \"vessels\")\n", "\n", " # ---------------------------\n", " # geojson\n", " # save vessels' centroids from a single image to a geojson file \n", "\n", " if not(pp.is_empty):\n", " if type(pp) == shapely.geometry.point.Point: # only 1 vessel\n", " points1 = [Point(pp.x, pp.y)]\n", " pp1 = gpd.GeoDataFrame(geometry=points1, crs=4326)\n", " pp1.rename(columns ={'geometry':'0'}) \n", " elif len(pp)>0:\n", " pp1 = gpd.GeoDataFrame(pp, geometry=0, crs=4326) #'EPSG:4326')\n", "\n", "\n", " # only if there are vessels\n", " if len(pp1)>0:\n", "\n", " # 
===================== csv ========================== #\n", "\n", " daytime2 = daytime1[:10]\n", " daytime2 = daytime2.replace('/', '-') + \"T\" + hour1 + \":00:00Z/\" + daytime2.replace('/', '-') + \"T\" + hour1 + \":59:59Z\"\n", "\n", " # it could happen 2 images in the same day....\n", "\n", " ssat, ddate = sentinelInfo(daytime2, bbox, 1)\n", " input_data = 'S1' + ssat +' - GRD'\n", " measurement_value = len(pp1)\n", " indicator_value = pp.wkt\n", "\n", " #['AOI','Country','Region','City','Site Name','Description','Method','EO Sensor','Input Data', \n", " #'Indicator code','Time','Measurement Value','Reference Description','Reference time','Reference value','Rule', \n", " #'Indicator Value','Sub-AOI','Y axis','Indicator Name','Color code','Data Provider','AOI_ID','Update Frequency']\n", "\n", " info = [aoi_center,co,'',city,port,description,method,eo_sensor,input_data,\n", " indicator_code,ddate,measurement_value,'',ddate,'','','',\n", " poly.geometry.values[0],'',indicator_name,'RED',data_provider,bbox_list[b],update_freq]\n", " writer.writerow(info)\n", "\n", " # ---------------------------\n", "\n", " # geojson \n", "\n", " date_geojson = ddate.replace(\"-\",\"/\").replace(\"T\",\" \")\n", " date_geojson1 = ddate.replace(\"-\",\"\").replace(\":\",\"\")\n", " pp1['TIMESTAMP UTC'] = date_geojson\n", " pp1['CLASSIFICATION'] = \"Vessel\"\n", "\n", " geojson_file = indicator_code + \"_\" + str(bbox_list[b]) + \"_\" + date_geojson1 + \".geojson\"\n", " #print(daytime, geojson_file)\n", "\n", " if os.path.exists('out/' + geojson_file):\n", " os.remove('out/' + geojson_file) \n", " #pp1.to_crs('EPSG:3035').to_file('out/' + geojson_file, driver='GeoJSON')\n", " pp1.to_crs('EPSG:4326').to_file('out/' + geojson_file, driver='GeoJSON')\n", "\n", " with open('out/' + geojson_file, 'r') as file:\n", " filedata = file.read()\n", " filedata = filedata.replace('/', '\\/')\n", " #remove info on crs\n", " filedata = filedata.replace('\"crs\": { \"type\": \"name\", 
\"properties\": { \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" } },\\n', '')\n", "\n", " with open('out/' + geojson_file, 'w') as file:\n", " file.write(filedata)\n", " # ---------------------------\n", "\n", " points = points.union(pp) \n", "\n", " else:\n", " #nv = 0\n", " #print(\"n\", end=\"..\")\n", " continue\n", "\n", " else:\n", " #nv = 0\n", " #print(\"n\", end=\"..\")\n", " continue\n", "\n", " # ---------------------------------------------\n", " # for footprints\n", " print(\" footprints\")\n", " mask = xr.concat(tmp_masks, dim='time')\n", " mask2 = mask.sum(dim='time')\n", " mask2 = mask2.astype(rasterio.uint8)\n", " mask2.rio.write_crs(\"EPSG:4326\", inplace=True)\n", "\n", " #vectorize raster of footprints\n", " with rasterio.Env(): \n", " results = (\n", " {'properties': {'raster_val': v}, 'geometry': s}\n", " for i, (s, v) \n", " in enumerate(\n", " shapes(mask2.values, mask=None, connectivity=4, transform=mask2.rio.transform())))\n", " ftprints = list(results)\n", "\n", " #---------------------------------------------------------\n", " # create geojson from footprints\n", "\n", " tmp_list = []\n", "\n", " for p2 in range(len(ftprints)):\n", " val = int(ftprints[p2]['properties']['raster_val'])\n", " ppp2 = shape(ftprints[p2]['geometry']) \n", " tmp_list.append({\n", " 'geometry' : Polygon(ppp2),\n", " 'val': val\n", " })\n", " mpl = gpd.GeoDataFrame(tmp_list, crs='epsg:4326')\n", " mpl.to_file(\"mpl.geojson\", driver='GeoJSON') \n", "\n", " # ---------------------------------------------\n", "\n", " ppp = gpd.GeoDataFrame(points,geometry=0, crs='EPSG:4326')\n", " ppp.insert(1, 't', 0)\n", " ppp.rename_geometry('geometry').to_file(\"vvv.geojson\", driver='GeoJSON')\n", " points3 = ppp.to_crs('EPSG:3035')\n", "\n", " # count vessels for each vector grid with 1-km pixel\n", " \n", " print(\" vessel count\")\n", " grid1k = gpd.read_file('grid1k/'+ bbox_list[b] + '.geojson')\n", " if os.path.exists('fin.geojson'):\n", " os.remove('fin.geojson')\n", " 
dfsjoin = gpd.sjoin(grid1k, points3,how=\"inner\") #,predicate=\"within\"\n", " dfpivot = pd.pivot_table(dfsjoin,index='id',columns='t',aggfunc={'t':len})\n", " dfpivot.columns = dfpivot.columns.droplevel()\n", " dfpolynew = grid1k.merge(dfpivot, how='left', on='id')\n", " df = dfpolynew.rename(columns ={dfpolynew.columns[-1]:'vessels'})\n", " df.columns = df.columns.astype(str)\n", " df.to_file(\"fin.geojson\", driver='GeoJSON')\n", " \n", " # convert vector grid to raster and calc density\n", " print(\" density\")\n", "\n", " # create rasters of number of vessels and acquisitions\n", " raster1k = 'grid1k/'+ bbox_list[b] + '.tif'\n", " !gdal_calc.py --quiet --overwrite -A $raster1k --outfile=n_vessels.tif --calc=\"0\"\n", " shutil.copy('n_vessels.tif', 'n_acquisitions.tif')\n", " !gdal_rasterize -q -add -a vessels fin.geojson n_vessels.tif\n", " !gdal_rasterize -q -add -a val mpl.geojson n_acquisitions.tif\n", "\n", "\n", " # raster of the vessel density\n", " dens_temp = \"out/density_vessels_temp.tif\"\n", "\n", " dens = \"out/density_vessels_\" + bbox_list[b] + \"_\" + str(year) + str(month) + '01_cog.tiff'\n", " !gdal_calc.py --quiet --overwrite -A n_vessels.tif -B n_acquisitions.tif --outfile=$dens_temp --calc=\"(1000*A)/B\"\n", " !gdal_translate -q -of COG -co COMPRESS=DEFLATE -co BLOCKSIZE=1024 -co RESAMPLING=AVERAGE -co OVERVIEWS=IGNORE_EXISTING -ot Int16 -a_nodata -9999 $dens_temp $dens\n", "\n", "\n", " if os.path.exists(dens_temp):\n", " os.remove(dens_temp)\n", " if os.path.exists('fin.geojson'):\n", " os.remove('fin.geojson')\n", " if os.path.exists('vvv.geojson'):\n", " os.remove('vvv.geojson') \n", " print(\" ok\")\n", "\n", "\n", "print(\"ok\")"]}, {"cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [{"data": {"text/plain": ["'/home/jovyan/output.zip'"]}, "execution_count": 6, "metadata": {}, "output_type": "execute_result"}], "source": ["shutil.make_archive(\"output\",'zip',\"out\")"]}, {"cell_type": "code", "execution_count": 
# Bundle the 1-km grid inputs into grid1k.zip so they can be downloaded.
shutil.make_archive("grid1k", "zip", "grid1k")
# Analysis period: every month of the target year.
year = 2020
months = list(range(1, 13))
def vesselShape(imgtif):
    """Extract vessel centroids from a thresholded Sentinel-1 image.

    Parameters
    ----------
    imgtif : rioxarray DataArray
        Binary raster (1 = vessel candidate, 0 = background); assumed to
        carry a valid affine transform via ``.rio.transform()`` — set by
        the caller with ``rio.write_crs`` — TODO confirm.

    Returns
    -------
    (shapely MultiPoint/Point, int)
        Union of centroids of all value-1 polygons and their count.
        BUG FIX: previously returned ``(None, 0)`` when no polygon was
        found, which crashed the caller's ``pp.is_empty`` check; an empty
        MultiPoint is returned instead (``.is_empty`` is then True).
    """
    img_u8 = imgtif.astype(rasterio.uint8)
    # Despeckle: drop connected components smaller than 15 pixels.
    sieved = rasterio.features.sieve(img_u8.to_numpy(), 15, connectivity=8)

    with rasterio.Env():
        geoms = [
            {'properties': {'raster_val': v}, 'geometry': s}
            for s, v in shapes(sieved, mask=None,
                               transform=imgtif.rio.transform())
        ]

    centroids = MultiPoint([])
    n_vessels = 0
    for geom in geoms:
        # Only value-1 polygons are vessel candidates (0 is background).
        if int(geom['properties']['raster_val']) == 1:
            n_vessels += 1
            centroids = centroids.union(shape(geom['geometry']).centroid)
    return centroids, n_vessels


def sentinelInfo(data, box, n):
    """Query the Sentinel Hub STAC catalog for Sentinel-1 GRD scenes.

    Parameters
    ----------
    data : str
        Datetime interval, e.g. ``"2020-01-01T00:00:00Z/2020-01-31T23:59:59Z"``.
    box : str
        Comma-separated bbox string, e.g. ``"10,54.27,11,54.6"``.
    n : int
        Maximum number of features to request.

    Returns
    -------
    (str, str)
        Platform letter ('A'/'B') taken from the scene id, and the scene
        datetime without its trailing 'Z'.

    Raises
    ------
    IndexError if the catalog returns no features for the interval/bbox.
    """
    # OAuth2 client-credentials flow against Sentinel Hub.
    client = BackendApplicationClient(client_id=client_id)
    oauth = OAuth2Session(client=client)
    token = oauth.fetch_token(
        token_url='https://services.sentinel-hub.com/oauth/token',
        client_secret=client_secret)
    access_token = token['access_token']

    # Build the search payload with json.dumps instead of string
    # concatenation so datetime/bbox values are always valid JSON.
    payload = json.dumps({
        "collections": ["sentinel-1-grd"],
        "datetime": data,
        "bbox": [float(coord) for coord in box.split(',')],
        "limit": int(n),
    })
    headers = {"Content-Type": "application/json",
               "Authorization": "Bearer " + access_token}
    endpoint = "https://services.sentinel-hub.com/api/v1/catalog/search"
    response = requests.post(endpoint, data=payload, headers=headers,
                             timeout=60)  # avoid hanging forever on the API

    res = json.loads(response.content.decode('UTF-8'))
    feature_id = res['features'][0]['id']   # e.g. 'S1A_IW_GRDH_...'
    sat = feature_id[2:3]                   # platform letter: 'A' or 'B'
    ddatetime = res['features'][0]['properties']['datetime'][:-1]  # strip 'Z'
    return sat, ddatetime
# Render the 100-km grid on an interactive map so the user can pick the
# tile IDs to process; each tile is labelled with its Aoi_Id.
with open('grid100k_geo.geojson', 'r') as f:
    grid_geojson = json.load(f)

from ipyleaflet import Map, basemaps, basemap_to_tiles, GeoJSON, Marker, DivIcon, WidgetControl

m = Map(
    basemap=basemap_to_tiles(basemaps.OpenStreetMap.Mapnik),
    center=(49.0, 9.0),
    zoom=5,
)
geo_json = GeoJSON(
    data=grid_geojson,
    style={'opacity': 1, 'fillOpacity': 0.1, 'weight': 1, 'fillColor': 'green'},
    hover_style={'color': 'blue', 'fillOpacity': 0.5, 'weight': 1.2},
)
for idx in range(len(grid100k)):
    tile_bounds = grid100k.iloc[idx]["geometry"].bounds
    # Label a quarter of the way across the tile, vertically centred.
    label_lon = tile_bounds[0] + (tile_bounds[2] - tile_bounds[0]) * 0.25
    label_lat = grid100k.iloc[idx]["geometry"].centroid.y
    tile_marker = Marker(
        location=[label_lat, label_lon],
        icon=DivIcon(html=f"""{grid100k.iloc[idx]['Aoi_Id']}""",),
        draggable=False,
    )
    m.add_layer(tile_marker)
m.add_layer(geo_json)
m.layout.height = '800px'
m

# TILE LIST — all available tiles, kept for reference:
# 'BE1','DE1','DE2','DK2','EE1','GR7',
# 'ES1','ES18','ES3','ES4','ES5','FR2',
# 'FR3','FR4','IE2','IT3','IT16','IT17',
# 'IT18','IT19','LT1','LV1','NL3','NL4',
# 'NO3','NO4','PL1','PT2','RO1','RU1',
# 'TR1','TR2'
bbox_list = ('NL3',)
"\u001b[0;31mHTTPError\u001b[0m: 403 Client Error: Forbidden for url: https://services.sentinel-hub.com/api/v1/catalog/search", "\nThe above exception was the direct cause of the following exception:\n", "\u001b[0;31mSentinelHubError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn [11], line 56\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[38;5;66;03m# ---------------------------------------------\u001b[39;00m\n\u001b[1;32m 48\u001b[0m config_s1 \u001b[38;5;241m=\u001b[39m CubeConfig(dataset_name \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mS1GRD\u001b[39m\u001b[38;5;124m'\u001b[39m,\n\u001b[1;32m 49\u001b[0m band_names \u001b[38;5;241m=\u001b[39m [\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mVV\u001b[39m\u001b[38;5;124m'\u001b[39m],\n\u001b[1;32m 50\u001b[0m tile_size \u001b[38;5;241m=\u001b[39m [\u001b[38;5;241m512\u001b[39m, \u001b[38;5;241m512\u001b[39m],\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 54\u001b[0m time_range \u001b[38;5;241m=\u001b[39m time_interval, \n\u001b[1;32m 55\u001b[0m ) \n\u001b[0;32m---> 56\u001b[0m s1 \u001b[38;5;241m=\u001b[39m \u001b[43mopen_cube\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconfig_s1\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 57\u001b[0m n_img \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlen\u001b[39m(s1\u001b[38;5;241m.\u001b[39mVV\u001b[38;5;241m.\u001b[39mtime)\n\u001b[1;32m 58\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124maoi\u001b[39m\u001b[38;5;124m\"\u001b[39m, bbox_list[b], \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m - \u001b[39m\u001b[38;5;124m\"\u001b[39m,n_img, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mimages in the month of\u001b[39m\u001b[38;5;124m'\u001b[39m, month, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m-\u001b[39m\u001b[38;5;124m'\u001b[39m,year)\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/cube.py:63\u001b[0m, in \u001b[0;36mopen_cube\u001b[0;34m(cube_config, observer, 
trace_store_calls, max_cache_size, sentinel_hub, **sh_kwargs)\u001b[0m\n\u001b[1;32m 60\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m sh_kwargs:\n\u001b[1;32m 61\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124munexpected keyword-arguments:\u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m 62\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(sh_kwargs\u001b[38;5;241m.\u001b[39mkeys())\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m---> 63\u001b[0m cube_store \u001b[38;5;241m=\u001b[39m \u001b[43mSentinelHubChunkStore\u001b[49m\u001b[43m(\u001b[49m\u001b[43msentinel_hub\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 64\u001b[0m \u001b[43m \u001b[49m\u001b[43mcube_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 65\u001b[0m \u001b[43m \u001b[49m\u001b[43mobserver\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mobserver\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 66\u001b[0m \u001b[43m \u001b[49m\u001b[43mtrace_store_calls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtrace_store_calls\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 67\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m max_cache_size:\n\u001b[1;32m 68\u001b[0m cube_store \u001b[38;5;241m=\u001b[39m zarr\u001b[38;5;241m.\u001b[39mLRUStoreCache(cube_store, max_cache_size)\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/chunkstore.py:570\u001b[0m, in \u001b[0;36mSentinelHubChunkStore.__init__\u001b[0;34m(self, sentinel_hub, cube_config, observer, trace_store_calls)\u001b[0m\n\u001b[1;32m 568\u001b[0m d[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mband_sample_types\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m 
band_sample_types\n\u001b[1;32m 569\u001b[0m cube_config \u001b[38;5;241m=\u001b[39m CubeConfig\u001b[38;5;241m.\u001b[39mfrom_dict(d)\n\u001b[0;32m--> 570\u001b[0m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;21;43m__init__\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mcube_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 571\u001b[0m \u001b[43m \u001b[49m\u001b[43mobserver\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mobserver\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 572\u001b[0m \u001b[43m \u001b[49m\u001b[43mtrace_store_calls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtrace_store_calls\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/chunkstore.py:92\u001b[0m, in \u001b[0;36mRemoteStore.__init__\u001b[0;34m(self, cube_config, observer, trace_store_calls)\u001b[0m\n\u001b[1;32m 90\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_observers \u001b[38;5;241m=\u001b[39m [observer] \u001b[38;5;28;01mif\u001b[39;00m observer \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01melse\u001b[39;00m []\n\u001b[1;32m 91\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_trace_store_calls \u001b[38;5;241m=\u001b[39m trace_store_calls\n\u001b[0;32m---> 92\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_time_ranges \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_time_ranges\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 94\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_time_ranges:\n\u001b[1;32m 95\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m 
\u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mCould not determine any valid time stamps\u001b[39m\u001b[38;5;124m'\u001b[39m)\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/chunkstore.py:593\u001b[0m, in \u001b[0;36mSentinelHubChunkStore.get_time_ranges\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 589\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcannot find collection\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 590\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m name for dataset name\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 591\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_cube_config\u001b[38;5;241m.\u001b[39mdataset_name\u001b[38;5;132;01m!r}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 592\u001b[0m datetime_format \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mY-\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mm-\u001b[39m\u001b[38;5;132;01m%d\u001b[39;00m\u001b[38;5;124mT\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mH:\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mM:\u001b[39m\u001b[38;5;124m%\u001b[39m\u001b[38;5;124mSZ\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m--> 593\u001b[0m features \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_sentinel_hub\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_features\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 594\u001b[0m \u001b[43m \u001b[49m\u001b[43mcollection_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcollection_name\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 
595\u001b[0m \u001b[43m \u001b[49m\u001b[43mbbox\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_cube_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbbox\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 596\u001b[0m \u001b[43m \u001b[49m\u001b[43mcrs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_cube_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcrs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 597\u001b[0m \u001b[43m \u001b[49m\u001b[43mtime_range\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m(\u001b[49m\n\u001b[1;32m 598\u001b[0m \u001b[43m \u001b[49m\u001b[43mtime_start\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstrftime\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdatetime_format\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 599\u001b[0m \u001b[43m \u001b[49m\u001b[43mtime_end\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstrftime\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdatetime_format\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 600\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 601\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 602\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m features:\n\u001b[1;32m 603\u001b[0m \u001b[38;5;66;03m# Maybe the dataset has no data in given time_range\u001b[39;00m\n\u001b[1;32m 604\u001b[0m \u001b[38;5;66;03m# or the dataset has no associated time information\u001b[39;00m\n\u001b[1;32m 605\u001b[0m \u001b[38;5;66;03m# such as some BYOC or DEM.\u001b[39;00m\n\u001b[1;32m 606\u001b[0m \u001b[38;5;66;03m# Repeat query without time_range.\u001b[39;00m\n\u001b[1;32m 607\u001b[0m features \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sentinel_hub\u001b[38;5;241m.\u001b[39mget_features(\n\u001b[1;32m 608\u001b[0m 
collection_name\u001b[38;5;241m=\u001b[39mcollection_name,\n\u001b[1;32m 609\u001b[0m bbox\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_cube_config\u001b[38;5;241m.\u001b[39mbbox,\n\u001b[1;32m 610\u001b[0m crs\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_cube_config\u001b[38;5;241m.\u001b[39mcrs,\n\u001b[1;32m 611\u001b[0m )\n", "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/xcube_sh/sentinelhub.py:265\u001b[0m, in \u001b[0;36mSentinelHub.get_features\u001b[0;34m(self, collection_name, bbox, crs, time_range)\u001b[0m\n\u001b[1;32m 260\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m features_count \u001b[38;5;241m==\u001b[39m max_feature_count:\n\u001b[1;32m 261\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msession\u001b[38;5;241m.\u001b[39mpost(catalog_url,\n\u001b[1;32m 262\u001b[0m json\u001b[38;5;241m=\u001b[39mrequest,\n\u001b[1;32m 263\u001b[0m headers\u001b[38;5;241m=\u001b[39mheaders)\n\u001b[0;32m--> 265\u001b[0m \u001b[43mSentinelHubError\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmaybe_raise_for_response\u001b[49m\u001b[43m(\u001b[49m\u001b[43mresponse\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 267\u001b[0m feature_collection \u001b[38;5;241m=\u001b[39m json\u001b[38;5;241m.\u001b[39mloads(response\u001b[38;5;241m.\u001b[39mcontent)\n\u001b[1;32m 268\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m feature_collection\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtype\u001b[39m\u001b[38;5;124m'\u001b[39m) \u001b[38;5;241m!=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mFeatureCollection\u001b[39m\u001b[38;5;124m'\u001b[39m \\\n\u001b[1;32m 269\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m 
# ====================================================================
# Vessel density per 1-km cell, per 100-km tile and per month.
# For every tile in bbox_list and every month in `months`:
#   1. open a Sentinel-1 VV cube for the tile/month,
#   2. detect vessels in each acquisition; log them to CSV + GeoJSON,
#   3. rasterize vessel counts and acquisition counts on the 1-km grid,
#   4. density = 1000 * vessels / acquisitions, written as a COG GeoTIFF.
# Requires the GDAL CLI tools (gdal_calc.py, gdal_rasterize,
# gdal_translate) on PATH; failures now raise via check=True instead of
# being silently ignored by shell magics.
# ====================================================================
import subprocess

warnings.filterwarnings('ignore')

# ---- CSV set-up -----------------------------------------------------
gen_date = datetime.datetime.today().strftime('%Y%m%dT%H%M%S')
csv_file = indicator_code + '_' + data_provider + '_' + gen_date + '.csv'
header = ['AOI','Country','Region','City','Site Name','Description','Method','EO Sensor','Input Data', 
       'Indicator code','Time','Measurement Value','Reference Description','Reference time','Reference value','Rule', 
       'Indicator Value','Sub-AOI','Y axis','Indicator Name','Color code','Data Provider','AOI_ID','Update Frequency']

if os.path.exists('out/' + csv_file):
    os.remove('out/' + csv_file)

# The land/sea mask is identical for every image: load it once here
# instead of re-reading it inside the per-image loop.
land = gpd.read_file('sea_4326.geojson')

with open('out/' + csv_file, 'w', encoding='UTF8') as f:
    writer = csv.writer(f)
    writer.writerow(header)

    # ---- main processing ---------------------------------------------
    for b in range(len(bbox_list)):

        for month in months:

            d1 = datetime.datetime(year, month, 1, 12, 0, 0)
            d2 = datetime.datetime(year, month,
                                   calendar.monthrange(year, month)[1],
                                   12, 0, 0)
            time_interval = d1.strftime("%Y-%m-%d"), d2.strftime("%Y-%m-%d")
            # zero-padded month for filenames/log output (was rebound
            # onto the loop variable `month` itself)
            month_str = str(month) if month > 9 else "0" + str(month)

            poly = grid100k[grid100k.Aoi_Id == bbox_list[b]]

            # ---- tile-level CSV fields --------------------------------
            aoi_center = (str(round(poly.geometry.centroid.values[0].y, 6))
                          + ", "
                          + str(round(poly.geometry.centroid.values[0].x, 6)))
            co = poly.Country.values[0]
            port = poly.Port.values[0]
            city = poly.City.values[0]
            tile_bounds = poly.geometry.bounds.values[0]
            bbox = ','.join(str(round(tile_bounds[k], 3)) for k in range(4))

            # ---- open the Sentinel-1 cube for this tile/month ---------
            tmp_masks = []
            config_s1 = CubeConfig(dataset_name='S1GRD',
                                   band_names=['VV'],
                                   tile_size=[512, 512],
                                   crs="http://www.opengis.net/def/crs/EPSG/0/4326",
                                   spatial_res=res2,
                                   bbox=bbox,
                                   time_range=time_interval,
                                   )
            s1 = open_cube(config_s1)
            n_img = len(s1.VV.time)
            print("aoi", bbox_list[b], " - ", n_img,
                  'images in the month of', month_str, '-', year)
            print(" day")

            points = MultiPoint([])

            for j in range(n_img):

                pp1 = pd.DataFrame()
                img0 = s1.VV[j]

                # Footprint mask of this acquisition (1 where data exists).
                tmp_mask = img0.notnull().astype(rasterio.uint8)
                tmp_mask.rio.write_crs("EPSG:4326", inplace=True)
                tmp_masks.append(tmp_mask)

                # Timestamps reused in filenames and the catalog query.
                daytime = str(s1.VV.time[j].values).replace("-", "").replace(":", "")
                daytime = daytime[:daytime.find(".")]
                daytime1 = str(s1.VV.time[j].values).replace("-", "/").replace("T", " ")
                daytime1 = daytime1[:daytime1.find(".")]

                day = daytime[6:11]       # 'DDTHH' slice of YYYYMMDDTHHMMSS
                hour = int(day[-2:])
                hour1 = str(hour) if hour > 9 else "0" + str(hour)
                print("  ", day, end="..")

                # Threshold backscatter, then mask out land and fixed
                # seamarks (windfarms, ...).
                img2 = (img0 > 0.7).astype(rasterio.uint8)
                img2.rio.write_crs("EPSG:4326", inplace=True)
                clipped = img2.rio.clip(land.geometry, land.crs,
                                        drop=True, invert=False)

                # Assess the number of vessels in this acquisition.
                pp, nvess = vesselShape(clipped)
                print(nvess, "vessels")

                # BUG FIX: vesselShape can return None when no vessel
                # polygon is found; the original `not(pp.is_empty)` then
                # raised AttributeError. Guard both cases.
                if pp is None or pp.is_empty:
                    continue

                # Save vessel centroids from this image to a GeoDataFrame.
                if type(pp) == shapely.geometry.point.Point:  # only 1 vessel
                    pp1 = gpd.GeoDataFrame(geometry=[Point(pp.x, pp.y)], crs=4326)
                    pp1.rename(columns={'geometry': '0'})
                elif len(pp) > 0:
                    pp1 = gpd.GeoDataFrame(pp, geometry=0, crs=4326)

                if len(pp1) == 0:
                    continue

                # ===================== csv ========================== #
                day_iso = daytime1[:10].replace('/', '-')
                daytime2 = (day_iso + "T" + hour1 + ":00:00Z/"
                            + day_iso + "T" + hour1 + ":59:59Z")

                # it could happen 2 images in the same day....
                ssat, ddate = sentinelInfo(daytime2, bbox, 1)
                input_data = 'S1' + ssat + ' - GRD'
                measurement_value = len(pp1)
                # NOTE(review): computed but never written to the CSV —
                # possibly intended for the 'Indicator Value' column below.
                indicator_value = pp.wkt

                info = [aoi_center, co, '', city, port, description, method,
                        eo_sensor, input_data, indicator_code, ddate,
                        measurement_value, '', ddate, '', '', '',
                        poly.geometry.values[0], '', indicator_name, 'RED',
                        data_provider, bbox_list[b], update_freq]
                writer.writerow(info)

                # -------------------- geojson ------------------------
                date_geojson = ddate.replace("-", "/").replace("T", " ")
                date_geojson1 = ddate.replace("-", "").replace(":", "")
                pp1['TIMESTAMP UTC'] = date_geojson
                pp1['CLASSIFICATION'] = "Vessel"

                geojson_file = (indicator_code + "_" + str(bbox_list[b])
                                + "_" + date_geojson1 + ".geojson")

                if os.path.exists('out/' + geojson_file):
                    os.remove('out/' + geojson_file)
                pp1.to_crs('EPSG:4326').to_file('out/' + geojson_file,
                                                driver='GeoJSON')

                # Escape forward slashes and strip the crs member from
                # the written file ('\\/' is the explicit spelling of the
                # original '\/' two-character replacement string).
                with open('out/' + geojson_file, 'r') as file:
                    filedata = file.read()
                filedata = filedata.replace('/', '\\/')
                filedata = filedata.replace('"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },\n', '')
                with open('out/' + geojson_file, 'w') as file:
                    file.write(filedata)

                points = points.union(pp)

            # ---- footprints: acquisitions covering each pixel ----------
            print(" footprints")
            mask = xr.concat(tmp_masks, dim='time')
            mask2 = mask.sum(dim='time').astype(rasterio.uint8)
            mask2.rio.write_crs("EPSG:4326", inplace=True)

            # Vectorize the raster of footprints.
            with rasterio.Env():
                ftprints = [
                    {'properties': {'raster_val': v}, 'geometry': s}
                    for s, v in shapes(mask2.values, mask=None, connectivity=4,
                                       transform=mask2.rio.transform())
                ]

            # Create geojson from footprints.
            tmp_list = [{'geometry': Polygon(shape(fp['geometry'])),
                         'val': int(fp['properties']['raster_val'])}
                        for fp in ftprints]
            mpl = gpd.GeoDataFrame(tmp_list, crs='epsg:4326')
            mpl.to_file("mpl.geojson", driver='GeoJSON')

            # ---- vessel points -> 1-km grid counts ---------------------
            ppp = gpd.GeoDataFrame(points, geometry=0, crs='EPSG:4326')
            ppp.insert(1, 't', 0)
            ppp.rename_geometry('geometry').to_file("vvv.geojson", driver='GeoJSON')
            points3 = ppp.to_crs('EPSG:3035')

            # Count vessels for each vector grid cell with 1-km pixel.
            print(" vessel count")
            grid1k = gpd.read_file('grid1k/' + bbox_list[b] + '.geojson')
            if os.path.exists('fin.geojson'):
                os.remove('fin.geojson')
            dfsjoin = gpd.sjoin(grid1k, points3, how="inner")  # ,predicate="within"
            dfpivot = pd.pivot_table(dfsjoin, index='id', columns='t',
                                     aggfunc={'t': len})
            dfpivot.columns = dfpivot.columns.droplevel()
            dfpolynew = grid1k.merge(dfpivot, how='left', on='id')
            df = dfpolynew.rename(columns={dfpolynew.columns[-1]: 'vessels'})
            df.columns = df.columns.astype(str)
            df.to_file("fin.geojson", driver='GeoJSON')

            # ---- rasterize counts and compute density ------------------
            print(" density")

            # Create rasters of number of vessels and of acquisitions.
            raster1k = 'grid1k/' + bbox_list[b] + '.tif'
            subprocess.run(["gdal_calc.py", "--quiet", "--overwrite",
                            "-A", raster1k, "--outfile=n_vessels.tif",
                            "--calc=0"], check=True)
            shutil.copy('n_vessels.tif', 'n_acquisitions.tif')
            subprocess.run(["gdal_rasterize", "-q", "-add", "-a", "vessels",
                            "fin.geojson", "n_vessels.tif"], check=True)
            subprocess.run(["gdal_rasterize", "-q", "-add", "-a", "val",
                            "mpl.geojson", "n_acquisitions.tif"], check=True)

            # Raster of the vessel density (vessels per 1000 acquisitions).
            dens_temp = "out/density_vessels_temp.tif"
            dens = ("out/density_vessels_" + bbox_list[b] + "_" + str(year)
                    + month_str + '01_cog.tiff')
            subprocess.run(["gdal_calc.py", "--quiet", "--overwrite",
                            "-A", "n_vessels.tif", "-B", "n_acquisitions.tif",
                            "--outfile=" + dens_temp,
                            "--calc=(1000*A)/B"], check=True)
            subprocess.run(["gdal_translate", "-q", "-of", "COG",
                            "-co", "COMPRESS=DEFLATE", "-co", "BLOCKSIZE=1024",
                            "-co", "RESAMPLING=AVERAGE",
                            "-co", "OVERVIEWS=IGNORE_EXISTING",
                            "-ot", "Int16", "-a_nodata", "-9999",
                            dens_temp, dens], check=True)

            # Clean up per-tile temporaries.
            for tmp_path in (dens_temp, 'fin.geojson', 'vvv.geojson'):
                if os.path.exists(tmp_path):
                    os.remove(tmp_path)
            print(" ok")

print("ok")
13, "metadata": {}, "outputs": [{"data": {"text/plain": ["'/home/jovyan/grid1k.zip'"]}, "execution_count": 13, "metadata": {}, "output_type": "execute_result"}], "source": ["shutil.make_archive(\"grid1k\",'zip',\"grid1k\")"]}, {"cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": []}], "metadata": {"kernelspec": {"display_name": "Python [conda env:edc-default-2022.10-14]", "language": "python", "name": "conda-env-edc-default-2022.10-14-py"}, "language_info": {"codemirror_mode": {"name": "ipython", "version": 3}, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.13"}, "properties": {"id": "9dc6586e-9aea-485e-acac-755f4eb5ef17", "license": "Commercial", "name": "Vessel Density based on Copernicus Sentinel-1", "requirements": ["eurodatacube", "eurodatacube-xcube-gen"], "tags": ["EO Data", "Download Service", "Jupyter", "Sentinel Hub", "Vector Data", "xcube"], "tosAgree": true, "type": "Jupyter Notebook", "version": "0.0.1", "license_price": "5.00", "license_notes": null, "description": "Vessel Density based on Copernicus Sentinel-1", "authors": [{"id": "d8a2b943-31ef-402a-a1c9-315163cc36f1", "name": "ryy83337@xcoxc.com"}]}}, "nbformat": 4, "nbformat_minor": 4} \ No newline at end of file +{ + "cells": [ + { + "cell_type": "markdown", + "id": "53f2e93e", + "metadata": { + "tags": [ + "papermill-error-cell-tag" + ] + }, + "source": [ + "An Exception was encountered at 'In [5]'." 
+ ] + }, + { + "cell_type": "markdown", + "id": "fd59ebb0", + "metadata": { + "papermill": { + "duration": 0.005329, + "end_time": "2023-01-31T18:06:08.716096", + "exception": false, + "start_time": "2023-01-31T18:06:08.710767", + "status": "completed" + }, + "tags": [] + }, + "source": [ + "# Vessel Density based on Copernicus Sentinel-1\n", + "---\n", + "_Author: Alessandro Cimbelli, Oct 2022_\n", + "\n", + "In this updated proposal, the marine area of interest is divided and analysed into areas of 100 km x 100 km\n", + "#### Methodology \n", + "1. Definition of the boxes ID to be analysed.\n", + "2. Extraction of the footprints of the Sentinel-1 (VV band) images acquired in the time period (one month).\n", + "3. Extraction of vessel centroids in each image\n", + "4. Map of vessel density for each 1-km grid tile of the selected area\n", + "5. Production of geojson and csv files\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "269da3e9", + "metadata": { + "execution": { + "iopub.execute_input": "2023-01-31T18:06:08.726356Z", + "iopub.status.busy": "2023-01-31T18:06:08.725705Z", + "iopub.status.idle": "2023-01-31T18:06:08.735822Z", + "shell.execute_reply": "2023-01-31T18:06:08.735015Z" + }, + "papermill": { + "duration": 0.017853, + "end_time": "2023-01-31T18:06:08.737704", + "exception": false, + "start_time": "2023-01-31T18:06:08.719851", + "status": "completed" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "year = 2020\n", + "months = [1,2,3,4,5,6,7,8,9,10,11,12]" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "7b1a4422", + "metadata": { + "execution": { + "iopub.execute_input": "2023-01-31T18:06:08.746698Z", + "iopub.status.busy": "2023-01-31T18:06:08.746274Z", + "iopub.status.idle": "2023-01-31T18:06:08.750335Z", + "shell.execute_reply": "2023-01-31T18:06:08.749597Z" + }, + "papermill": { + "duration": 0.01053, + "end_time": "2023-01-31T18:06:08.752056", + "exception": false, + "start_time": 
"2023-01-31T18:06:08.741526", + "status": "completed" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "#from edc import check_compatibility\n", + "#check_compatibility(\"user-2022.10-14\", dependencies=[\"SH\", \"XCUBE_GEN\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "4b5a4898", + "metadata": { + "execution": { + "iopub.execute_input": "2023-01-31T18:06:08.760844Z", + "iopub.status.busy": "2023-01-31T18:06:08.760513Z", + "iopub.status.idle": "2023-01-31T18:06:08.764351Z", + "shell.execute_reply": "2023-01-31T18:06:08.763575Z" + }, + "papermill": { + "duration": 0.01016, + "end_time": "2023-01-31T18:06:08.766172", + "exception": false, + "start_time": "2023-01-31T18:06:08.756012", + "status": "completed" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "#from edc import setup_environment_variables\n", + "#setup_environment_variables()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "74013f50", + "metadata": { + "execution": { + "iopub.execute_input": "2023-01-31T18:06:08.776143Z", + "iopub.status.busy": "2023-01-31T18:06:08.775732Z", + "iopub.status.idle": "2023-01-31T18:06:11.435040Z", + "shell.execute_reply": "2023-01-31T18:06:11.434107Z" + }, + "papermill": { + "duration": 2.667054, + "end_time": "2023-01-31T18:06:11.437338", + "exception": false, + "start_time": "2023-01-31T18:06:08.770284", + "status": "completed" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "from xcube_sh.config import CubeConfig\n", + "from xcube_sh.cube import open_cube\n", + "from xcube_sh.sentinelhub import SentinelHub\n", + "from xcube.core.geom import (\n", + " clip_dataset_by_geometry, \n", + " mask_dataset_by_geometry,\n", + " convert_geometry,\n", + " rasterize_features\n", + ")\n", + "import xarray as xr\n", + "import numpy as np\n", + "import shapely.geometry\n", + "import IPython.display\n", + "import calendar\n", + "\n", + "import datetime\n", + "import os, sys\n", + "%matplotlib inline\n", + "import 
matplotlib.pyplot as plt\n", + "import rasterio, rioxarray\n", + "import geopandas as gpd\n", + "import pandas as pd\n", + "import warnings\n", + "import scipy\n", + "import pyproj\n", + "from shapely.ops import transform, cascaded_union, unary_union\n", + "from shapely.geometry import shape, Point, Polygon, LineString, MultiPoint, MultiPolygon, mapping \n", + "from rasterio.features import sieve, shapes\n", + "from rasterio.plot import show, plotting_extent\n", + "from rasterio.enums import Resampling\n", + "import json\n", + "from oauthlib.oauth2 import BackendApplicationClient\n", + "from requests_oauthlib import OAuth2Session\n", + "import requests\n", + "import csv\n", + "import shutil" + ] + }, + { + "cell_type": "markdown", + "id": "f06f2c8a", + "metadata": { + "tags": [ + "papermill-error-cell-tag" + ] + }, + "source": [ + "Execution using papermill encountered an exception here and stopped:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "05f95f2c", + "metadata": { + "execution": { + "iopub.execute_input": "2023-01-31T18:06:11.446566Z", + "iopub.status.busy": "2023-01-31T18:06:11.445794Z", + "iopub.status.idle": "2023-01-31T18:06:12.669198Z", + "shell.execute_reply": "2023-01-31T18:06:12.667597Z" + }, + "papermill": { + "duration": 1.229908, + "end_time": "2023-01-31T18:06:12.671031", + "exception": true, + "start_time": "2023-01-31T18:06:11.441123", + "status": "failed" + }, + "tags": [] + }, + "outputs": [ + { + "ename": "DriverError", + "evalue": "grid100k_geo.geojson: No such file or directory", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mCPLE_OpenFailedError\u001b[0m Traceback (most recent call last)", + "File \u001b[0;32mfiona/_shim.pyx:83\u001b[0m, in \u001b[0;36mfiona._shim.gdal_open_vector\u001b[0;34m()\u001b[0m\n", + "File \u001b[0;32mfiona/_err.pyx:291\u001b[0m, in 
\u001b[0;36mfiona._err.exc_wrap_pointer\u001b[0;34m()\u001b[0m\n", + "\u001b[0;31mCPLE_OpenFailedError\u001b[0m: grid100k_geo.geojson: No such file or directory", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mDriverError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn [5], line 6\u001b[0m\n\u001b[1;32m 3\u001b[0m client_secret \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39menviron[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mSH_CLIENT_SECRET\u001b[39m\u001b[38;5;124m'\u001b[39m]\n\u001b[1;32m 5\u001b[0m grid_name \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mgrid100k_geo.geojson\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m----> 6\u001b[0m grid100k \u001b[38;5;241m=\u001b[39m \u001b[43mgpd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mread_file\u001b[49m\u001b[43m(\u001b[49m\u001b[43mgrid_name\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 8\u001b[0m res1 \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0.0054\u001b[39m \u001b[38;5;66;03m# 0.00054 # lower resolution for satellite footprints\u001b[39;00m\n\u001b[1;32m 9\u001b[0m res2 \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0.00018\u001b[39m \u001b[38;5;66;03m# Higher resolution for vessel shape and number\u001b[39;00m\n", + "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/geopandas/io/file.py:253\u001b[0m, in \u001b[0;36m_read_file\u001b[0;34m(filename, bbox, mask, rows, engine, **kwargs)\u001b[0m\n\u001b[1;32m 250\u001b[0m path_or_bytes \u001b[38;5;241m=\u001b[39m filename\n\u001b[1;32m 252\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m engine \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mfiona\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[0;32m--> 253\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_read_file_fiona\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 254\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mpath_or_bytes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfrom_bytes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbbox\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbbox\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmask\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrows\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrows\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[1;32m 255\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 256\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m engine \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpyogrio\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 257\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m _read_file_pyogrio(\n\u001b[1;32m 258\u001b[0m path_or_bytes, bbox\u001b[38;5;241m=\u001b[39mbbox, mask\u001b[38;5;241m=\u001b[39mmask, rows\u001b[38;5;241m=\u001b[39mrows, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs\n\u001b[1;32m 259\u001b[0m )\n", + "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/geopandas/io/file.py:294\u001b[0m, in \u001b[0;36m_read_file_fiona\u001b[0;34m(path_or_bytes, from_bytes, bbox, mask, rows, **kwargs)\u001b[0m\n\u001b[1;32m 291\u001b[0m reader \u001b[38;5;241m=\u001b[39m fiona\u001b[38;5;241m.\u001b[39mopen\n\u001b[1;32m 293\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m fiona_env():\n\u001b[0;32m--> 294\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[43mreader\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpath_or_bytes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;28;01mas\u001b[39;00m features:\n\u001b[1;32m 295\u001b[0m 
\n\u001b[1;32m 296\u001b[0m \u001b[38;5;66;03m# In a future Fiona release the crs attribute of features will\u001b[39;00m\n\u001b[1;32m 297\u001b[0m \u001b[38;5;66;03m# no longer be a dict, but will behave like a dict. So this should\u001b[39;00m\n\u001b[1;32m 298\u001b[0m \u001b[38;5;66;03m# be forwards compatible\u001b[39;00m\n\u001b[1;32m 299\u001b[0m crs \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 300\u001b[0m features\u001b[38;5;241m.\u001b[39mcrs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124minit\u001b[39m\u001b[38;5;124m\"\u001b[39m]\n\u001b[1;32m 301\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m features\u001b[38;5;241m.\u001b[39mcrs \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124minit\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m features\u001b[38;5;241m.\u001b[39mcrs\n\u001b[1;32m 302\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m features\u001b[38;5;241m.\u001b[39mcrs_wkt\n\u001b[1;32m 303\u001b[0m )\n\u001b[1;32m 305\u001b[0m \u001b[38;5;66;03m# handle loading the bounding box\u001b[39;00m\n", + "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/fiona/env.py:408\u001b[0m, in \u001b[0;36mensure_env_with_credentials..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 405\u001b[0m \u001b[38;5;129m@wraps\u001b[39m(f)\n\u001b[1;32m 406\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mwrapper\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 407\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m local\u001b[38;5;241m.\u001b[39m_env:\n\u001b[0;32m--> 408\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mf\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 409\u001b[0m 
\u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 410\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(args[\u001b[38;5;241m0\u001b[39m], \u001b[38;5;28mstr\u001b[39m):\n", + "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/fiona/__init__.py:264\u001b[0m, in \u001b[0;36mopen\u001b[0;34m(fp, mode, driver, schema, crs, encoding, layer, vfs, enabled_drivers, crs_wkt, **kwargs)\u001b[0m\n\u001b[1;32m 261\u001b[0m path \u001b[38;5;241m=\u001b[39m parse_path(fp)\n\u001b[1;32m 263\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m mode \u001b[38;5;129;01min\u001b[39;00m (\u001b[38;5;124m'\u001b[39m\u001b[38;5;124ma\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124m'\u001b[39m):\n\u001b[0;32m--> 264\u001b[0m c \u001b[38;5;241m=\u001b[39m \u001b[43mCollection\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpath\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdriver\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdriver\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mencoding\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mencoding\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 265\u001b[0m \u001b[43m \u001b[49m\u001b[43mlayer\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlayer\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43menabled_drivers\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43menabled_drivers\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 266\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m mode \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mw\u001b[39m\u001b[38;5;124m'\u001b[39m:\n\u001b[1;32m 267\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m schema:\n\u001b[1;32m 268\u001b[0m 
\u001b[38;5;66;03m# Make an ordered dict of schema properties.\u001b[39;00m\n", + "File \u001b[0;32m/opt/conda/envs/edc-default-2022.10-14/lib/python3.9/site-packages/fiona/collection.py:162\u001b[0m, in \u001b[0;36mCollection.__init__\u001b[0;34m(self, path, mode, driver, schema, crs, encoding, layer, vsi, archive, enabled_drivers, crs_wkt, ignore_fields, ignore_geometry, **kwargs)\u001b[0m\n\u001b[1;32m 160\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmode \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mr\u001b[39m\u001b[38;5;124m'\u001b[39m:\n\u001b[1;32m 161\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msession \u001b[38;5;241m=\u001b[39m Session()\n\u001b[0;32m--> 162\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msession\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstart\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 163\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmode \u001b[38;5;129;01min\u001b[39;00m (\u001b[38;5;124m'\u001b[39m\u001b[38;5;124ma\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mw\u001b[39m\u001b[38;5;124m'\u001b[39m):\n\u001b[1;32m 164\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msession \u001b[38;5;241m=\u001b[39m WritingSession()\n", + "File \u001b[0;32mfiona/ogrext.pyx:540\u001b[0m, in \u001b[0;36mfiona.ogrext.Session.start\u001b[0;34m()\u001b[0m\n", + "File \u001b[0;32mfiona/_shim.pyx:90\u001b[0m, in \u001b[0;36mfiona._shim.gdal_open_vector\u001b[0;34m()\u001b[0m\n", + "\u001b[0;31mDriverError\u001b[0m: grid100k_geo.geojson: No such file or directory" + ] + } + ], + "source": [ + "# Your client 
credentials\n", + "client_id = os.environ['SH_CLIENT_ID']\n", + "client_secret = os.environ['SH_CLIENT_SECRET']\n", + "\n", + "grid_name = \"grid100k_geo.geojson\"\n", + "grid100k = gpd.read_file(grid_name)\n", + "\n", + "res1 = 0.0054 # 0.00054 # lower resolution for satellite footprints\n", + "res2 = 0.00018 # Higher resolution for vessel shape and number\n", + "#dres = (res1-res2)/2\n", + "###############################\n", + "# csv parameters\n", + "eo_sensor = 'Sentinel-1'\n", + "indicator_code = 'E1b'\n", + "y_axis = 'Vessel density'\n", + "indicator_name = 'Vessel density'\n", + "data_provider = \"cimbelli\"\n", + "description = 'Number of vessels in the AOI'\n", + "method = 'Object detection of vessels from Sentinel-1 images'\n", + "update_freq = 'Monthly'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2030021e", + "metadata": { + "papermill": { + "duration": null, + "end_time": null, + "exception": null, + "start_time": null, + "status": "pending" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "# This function extracts the centroids of all the vessels in an image\n", + "def vesselShape(imgtif):\n", + "\n", + " img1tmp = imgtif.astype(rasterio.uint8)\n", + " img2tmp = rasterio.features.sieve(img1tmp.to_numpy(), 15, connectivity=8)\n", + " \n", + " with rasterio.Env():\n", + " image = img2tmp #img_vessels.read(1) # first band\n", + " results = (\n", + " {'properties': {'raster_val': v}, 'geometry': s}\n", + " for i, (s, v) \n", + " in enumerate(\n", + " shapes(image, mask=None, transform=imgtif.rio.transform())))\n", + " geoms = list(results)\n", + " npoly = len(geoms)\n", + " npoly_1 = 0\n", + " \n", + "# for each vessel get shape information\n", + " \n", + " ppc = MultiPoint([])\n", + " if npoly > 0:\n", + " for p in range(npoly):\n", + " val = int(geoms[p]['properties']['raster_val'])\n", + "\n", + " if val==1:\n", + " npoly_1 += 1\n", + " pp = shape(geoms[p]['geometry'])\n", + " pp_centroids = pp.centroid #.coords \n", 
+ " ppc = ppc.union(pp_centroids) \n", + " return ppc, npoly_1\n", + " else:\n", + " return None, 0\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "250f5a60", + "metadata": { + "papermill": { + "duration": null, + "end_time": null, + "exception": null, + "start_time": null, + "status": "pending" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "def sentinelInfo(data, box, n):\n", + "\n", + " # Create a session\n", + " client = BackendApplicationClient(client_id=client_id)\n", + " oauth = OAuth2Session(client=client)\n", + "\n", + " # Get token for the session\n", + " token = oauth.fetch_token(token_url='https://services.sentinel-hub.com/oauth/token', client_secret=client_secret)\n", + "\n", + " # All requests using this session will have an access token automatically added\n", + " resp = oauth.get(\"https://services.sentinel-hub.com/oauth/tokeninfo\") \n", + " token1 = token['access_token']\n", + " \n", + " data = '{\"collections\":[\"sentinel-1-grd\"], \"datetime\":\"' + data + '\",\"bbox\": [' + box + '] ,\"limit\": ' + str(n) + '}' # 2020-01-01T00:00:00Z/2020-01-31T23:59:59Z - 10,54.27,11,54.6\n", + " headers = {\"Content-Type\": \"application/json\" , \"Authorization\": \"Bearer \" + token1 }\n", + " endpoint = \"https://services.sentinel-hub.com/api/v1/catalog/search\"\n", + " \n", + " ggg = requests.post(endpoint, data = data, headers=headers)\n", + " \n", + " res = json.loads(ggg.content.decode('UTF-8'))\n", + " id = res['features'][0]['id']\n", + " sat = id[2:3]\n", + " \n", + " ddatetime = res['features'][0]['properties']['datetime'][:-1]\n", + " \n", + " return sat, ddatetime \n", + " " + ] + }, + { + "cell_type": "markdown", + "id": "10c296d4", + "metadata": { + "papermill": { + "duration": null, + "end_time": null, + "exception": null, + "start_time": null, + "status": "pending" + }, + "tags": [] + }, + "source": [ + "### Definition of the boxes ID to be analysed. 
Please edit them in the list below the map.\n", + "\n", + "_The mask of all European water areas has been produced from the coastline shapefile available at the page:_\n", + "\n", + "https://www.eea.europa.eu/data-and-maps/data/eea-coastline-for-analysis-1/gis-data/europe-coastline-shapefile\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3df7aecf", + "metadata": { + "papermill": { + "duration": null, + "end_time": null, + "exception": null, + "start_time": null, + "status": "pending" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "%%html\n", + "" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fc1c1582", + "metadata": { + "papermill": { + "duration": null, + "end_time": null, + "exception": null, + "start_time": null, + "status": "pending" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "with open('grid100k_geo.geojson', 'r') as f:\n", + " data = json.load(f)\n", + "\n", + "from ipyleaflet import Map, basemaps, basemap_to_tiles, GeoJSON, Marker, DivIcon, WidgetControl\n", + "m = Map(\n", + " basemap = basemap_to_tiles(basemaps.OpenStreetMap.Mapnik),\n", + " center = (49.0, 9.0),\n", + " zoom = 5\n", + " )\n", + "geo_json = GeoJSON(\n", + " data = data,\n", + " style = {'opacity':1, 'fillOpacity':0.1, 'weight':1, 'fillColor':'green'},\n", + " hover_style = {'color':'blue', 'fillOpacity':0.5, 'weight':1.2}\n", + " )\n", + "for i in range(0,len(grid100k)):\n", + " extx = grid100k.iloc[i][\"geometry\"].bounds[2]-grid100k.iloc[i][\"geometry\"].bounds[0]\n", + " lo = grid100k.iloc[i][\"geometry\"].bounds[0] + extx*0.25 #grid100k.iloc[i][\"geometry\"].centroid.x\n", + " la = grid100k.iloc[i][\"geometry\"].centroid.y\n", + " marker = Marker(\n", + " location = [la, lo], \n", + " icon = DivIcon(html=f\"\"\"
{grid100k.iloc[i]['Aoi_Id']}
\"\"\",),\n", + " draggable=False\n", + " )\n", + " m.add_layer(marker)\n", + "m.add_layer(geo_json)\n", + "m.layout.height = '800px'\n", + "m" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c9b8b293", + "metadata": { + "papermill": { + "duration": null, + "end_time": null, + "exception": null, + "start_time": null, + "status": "pending" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "# TILE LIST\n", + "\n", + "# 'BE1','DE1','DE2','DK2','EE1','GR7',\n", + "# 'ES1','ES18','ES3','ES4','ES5','FR2',\n", + "# 'FR3','FR4','IE2','IT3','IT16','IT17',\n", + "# 'IT18','IT19','LT1','LV1','NL3','NL4',\n", + "# 'NO3','NO4','PL1','PT2','RO1','RU1',\n", + "# 'TR1','TR2'\n", + "\n", + "bbox_list = ('NL3',)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a3a295f5", + "metadata": { + "papermill": { + "duration": null, + "end_time": null, + "exception": null, + "start_time": null, + "status": "pending" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "######################################################\n", + "# Density of vessels for each 1km cell\n", + "######################################################\n", + "\n", + "warnings.filterwarnings('ignore')\n", + "# generate csv\n", + "\n", + "gen_date = datetime.datetime.today().strftime('%Y%m%dT%H%M%S')\n", + "csv_file = indicator_code + '_' + data_provider + '_' + gen_date + '.csv'\n", + "header = ['AOI','Country','Region','City','Site Name','Description','Method','EO Sensor','Input Data', \n", + " 'Indicator code','Time','Measurement Value','Reference Description','Reference time','Reference value','Rule', \n", + " 'Indicator Value','Sub-AOI','Y axis','Indicator Name','Color code','Data Provider','AOI_ID','Update Frequency']\n", + "\n", + "if os.path.exists('out/' + csv_file):\n", + " os.remove('out/' + csv_file) \n", + "with open('out/' + csv_file, 'w', encoding='UTF8') as f:\n", + " writer = csv.writer(f)\n", + " writer.writerow(header)\n", + "\n", + 
"# start processing\n", + "\n", + " for b in range(len(bbox_list)):\n", + " \n", + " for month in months:\n", + " \n", + " \n", + " d1 = datetime.datetime(year, month, 1,12,0,0)\n", + " d2 = datetime.datetime(year, month, calendar.monthrange(year, month)[1],12,0,0)\n", + " time_interval= d1.strftime(\"%Y-%m-%d\"),d2.strftime(\"%Y-%m-%d\")\n", + " month = str(month) if (month)>9 else \"0\"+ str(month)\n", + " \n", + " arr_s1 = []\n", + " poly = grid100k[grid100k.Aoi_Id == bbox_list[b]]\n", + "\n", + " # --------------------------\n", + " # csv file\n", + " id100 = poly.id_100k.values[0]\n", + " aoi_center = str(round(poly.geometry.centroid.values[0].y,6)) + \", \" + str(round(poly.geometry.centroid.values[0].x,6))\n", + " co = poly.Country.values[0]\n", + " port = poly.Port.values[0]\n", + " city = poly.City.values[0]\n", + " bbox = str(round(poly.geometry.bounds.values[0][0],3)) + ',' + str(round(poly.geometry.bounds.values[0][1],3)) + ',' + str(round(poly.geometry.bounds.values[0][2],3)) + ',' + str(round(poly.geometry.bounds.values[0][3],3)) \n", + "\n", + " # ---------------------------------------------\n", + " tmp_masks = []\n", + " tmp_masks.clear() \n", + " # ---------------------------------------------\n", + " config_s1 = CubeConfig(dataset_name = 'S1GRD',\n", + " band_names = ['VV'],\n", + " tile_size = [512, 512],\n", + " crs = \"http://www.opengis.net/def/crs/EPSG/0/4326\",\n", + " spatial_res = res2, \n", + " bbox = bbox,\n", + " time_range = time_interval, \n", + " ) \n", + " s1 = open_cube(config_s1)\n", + " n_img = len(s1.VV.time)\n", + " print(\"aoi\", bbox_list[b], \" - \",n_img, 'images in the month of', month, '-',year)\n", + " print(\" day\") \n", + " # ---------------------------------------------\n", + " points = MultiPoint([]) \n", + " \n", + " for j in range(n_img): \n", + "\n", + " pp1 = pd.DataFrame()\n", + " img0 = s1.VV[j]\n", + "\n", + " # ---------------------------------------------\n", + " # for footprints\n", + " tmp_mask = 
img0.notnull() \n", + " tmp_mask = tmp_mask.astype(rasterio.uint8)\n", + " tmp_mask.rio.write_crs(\"EPSG:4326\", inplace=True)\n", + "\n", + " tmp_masks.append(tmp_mask)\n", + "\n", + " # ---------------------------------------------\n", + " # define geojson filename\n", + " daytime = str(s1.VV.time[j].values).replace(\"-\",\"\").replace(\":\",\"\")\n", + " daytime = daytime[:daytime.find(\".\")]\n", + " daytime1 = str(s1.VV.time[j].values).replace(\"-\",\"/\").replace(\"T\",\" \")\n", + " daytime1 = daytime1[:daytime1.find(\".\")]\n", + "\n", + " day = daytime[6:11]\n", + " hour = int(day[-2:])\n", + " hour1 = str(hour) if (hour)>9 else \"0\"+ str(hour)\n", + " print(\" \", day, end=\"..\")\n", + "\n", + "\n", + " # mask image with land and fixed semarks (windfarms, ...)\n", + " img1 = img0 > 0.7\n", + " img2 = img1.astype(rasterio.uint8)\n", + " img2.rio.write_crs(\"EPSG:4326\", inplace=True)\n", + "\n", + " land = gpd.read_file('sea_4326.geojson')\n", + " clipped = img2.rio.clip(land.geometry, land.crs, drop=True, invert=False)\n", + "\n", + " # assess the number of vessels\n", + " pp, nvess = vesselShape(clipped) \n", + " print(nvess, \"vessels\")\n", + "\n", + " # ---------------------------\n", + " # geojson\n", + " # save vessels' centroids from a single image to a geojson file \n", + "\n", + " if not(pp.is_empty):\n", + " if type(pp) == shapely.geometry.point.Point: # only 1 vessel\n", + " points1 = [Point(pp.x, pp.y)]\n", + " pp1 = gpd.GeoDataFrame(geometry=points1, crs=4326)\n", + " pp1.rename(columns ={'geometry':'0'}) \n", + " elif len(pp)>0:\n", + " pp1 = gpd.GeoDataFrame(pp, geometry=0, crs=4326) #'EPSG:4326')\n", + "\n", + "\n", + " # only if there are vessels\n", + " if len(pp1)>0:\n", + "\n", + " # ===================== csv ========================== #\n", + "\n", + " daytime2 = daytime1[:10]\n", + " daytime2 = daytime2.replace('/', '-') + \"T\" + hour1 + \":00:00Z/\" + daytime2.replace('/', '-') + \"T\" + hour1 + \":59:59Z\"\n", + "\n", + " # it 
could happen 2 images in the same day....\n", + "\n", + " ssat, ddate = sentinelInfo(daytime2, bbox, 1)\n", + " input_data = 'S1' + ssat +' - GRD'\n", + " measurement_value = len(pp1)\n", + " indicator_value = pp.wkt\n", + "\n", + " #['AOI','Country','Region','City','Site Name','Description','Method','EO Sensor','Input Data', \n", + " #'Indicator code','Time','Measurement Value','Reference Description','Reference time','Reference value','Rule', \n", + " #'Indicator Value','Sub-AOI','Y axis','Indicator Name','Color code','Data Provider','AOI_ID','Update Frequency']\n", + "\n", + " info = [aoi_center,co,'',city,port,description,method,eo_sensor,input_data,\n", + " indicator_code,ddate,measurement_value,'',ddate,'','','',\n", + " poly.geometry.values[0],'',indicator_name,'RED',data_provider,bbox_list[b],update_freq]\n", + " writer.writerow(info)\n", + "\n", + " # ---------------------------\n", + "\n", + " # geojson \n", + "\n", + " date_geojson = ddate.replace(\"-\",\"/\").replace(\"T\",\" \")\n", + " date_geojson1 = ddate.replace(\"-\",\"\").replace(\":\",\"\")\n", + " pp1['TIMESTAMP UTC'] = date_geojson\n", + " pp1['CLASSIFICATION'] = \"Vessel\"\n", + "\n", + " geojson_file = indicator_code + \"_\" + str(bbox_list[b]) + \"_\" + date_geojson1 + \".geojson\"\n", + " #print(daytime, geojson_file)\n", + "\n", + " if os.path.exists('out/' + geojson_file):\n", + " os.remove('out/' + geojson_file) \n", + " #pp1.to_crs('EPSG:3035').to_file('out/' + geojson_file, driver='GeoJSON')\n", + " pp1.to_crs('EPSG:4326').to_file('out/' + geojson_file, driver='GeoJSON')\n", + "\n", + " with open('out/' + geojson_file, 'r') as file:\n", + " filedata = file.read()\n", + " filedata = filedata.replace('/', '\\/')\n", + " #remove info on crs\n", + " filedata = filedata.replace('\"crs\": { \"type\": \"name\", \"properties\": { \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" } },\\n', '')\n", + "\n", + " with open('out/' + geojson_file, 'w') as file:\n", + " file.write(filedata)\n", + " # 
---------------------------\n", + "\n", + " points = points.union(pp) \n", + "\n", + " else:\n", + " #nv = 0\n", + " #print(\"n\", end=\"..\")\n", + " continue\n", + "\n", + " else:\n", + " #nv = 0\n", + " #print(\"n\", end=\"..\")\n", + " continue\n", + "\n", + " # ---------------------------------------------\n", + " # for footprints\n", + " print(\" footprints\")\n", + " mask = xr.concat(tmp_masks, dim='time')\n", + " mask2 = mask.sum(dim='time')\n", + " mask2 = mask2.astype(rasterio.uint8)\n", + " mask2.rio.write_crs(\"EPSG:4326\", inplace=True)\n", + "\n", + " #vectorize raster of footprints\n", + " with rasterio.Env(): \n", + " results = (\n", + " {'properties': {'raster_val': v}, 'geometry': s}\n", + " for i, (s, v) \n", + " in enumerate(\n", + " shapes(mask2.values, mask=None, connectivity=4, transform=mask2.rio.transform())))\n", + " ftprints = list(results)\n", + "\n", + " #---------------------------------------------------------\n", + " # create geojson from footprints\n", + "\n", + " tmp_list = []\n", + "\n", + " for p2 in range(len(ftprints)):\n", + " val = int(ftprints[p2]['properties']['raster_val'])\n", + " ppp2 = shape(ftprints[p2]['geometry']) \n", + " tmp_list.append({\n", + " 'geometry' : Polygon(ppp2),\n", + " 'val': val\n", + " })\n", + " mpl = gpd.GeoDataFrame(tmp_list, crs='epsg:4326')\n", + " mpl.to_file(\"mpl.geojson\", driver='GeoJSON') \n", + "\n", + " # ---------------------------------------------\n", + "\n", + " ppp = gpd.GeoDataFrame(points,geometry=0, crs='EPSG:4326')\n", + " ppp.insert(1, 't', 0)\n", + " ppp.rename_geometry('geometry').to_file(\"vvv.geojson\", driver='GeoJSON')\n", + " points3 = ppp.to_crs('EPSG:3035')\n", + "\n", + " # count vessels for each vector grid with 1-km pixel\n", + " \n", + " print(\" vessel count\")\n", + " grid1k = gpd.read_file('grid1k/'+ bbox_list[b] + '.geojson')\n", + " if os.path.exists('fin.geojson'):\n", + " os.remove('fin.geojson')\n", + " dfsjoin = gpd.sjoin(grid1k, points3,how=\"inner\") 
#,predicate=\"within\"\n", + " dfpivot = pd.pivot_table(dfsjoin,index='id',columns='t',aggfunc={'t':len})\n", + " dfpivot.columns = dfpivot.columns.droplevel()\n", + " dfpolynew = grid1k.merge(dfpivot, how='left', on='id')\n", + " df = dfpolynew.rename(columns ={dfpolynew.columns[-1]:'vessels'})\n", + " df.columns = df.columns.astype(str)\n", + " df.to_file(\"fin.geojson\", driver='GeoJSON')\n", + " \n", + " # convert vector grid to raster and calc density\n", + " print(\" density\")\n", + "\n", + " # create rasters of number of vessels and acquisitions\n", + " raster1k = 'grid1k/'+ bbox_list[b] + '.tif'\n", + " !gdal_calc.py --quiet --overwrite -A $raster1k --outfile=n_vessels.tif --calc=\"0\"\n", + " shutil.copy('n_vessels.tif', 'n_acquisitions.tif')\n", + " !gdal_rasterize -q -add -a vessels fin.geojson n_vessels.tif\n", + " !gdal_rasterize -q -add -a val mpl.geojson n_acquisitions.tif\n", + "\n", + "\n", + " # raster of the vessel density\n", + " dens_temp = \"out/density_vessels_temp.tif\"\n", + "\n", + " dens = \"out/density_vessels_\" + bbox_list[b] + \"_\" + str(year) + str(month) + '01_cog.tiff'\n", + " !gdal_calc.py --quiet --overwrite -A n_vessels.tif -B n_acquisitions.tif --outfile=$dens_temp --calc=\"(1000*A)/B\"\n", + " !gdal_translate -q -of COG -co COMPRESS=DEFLATE -co BLOCKSIZE=1024 -co RESAMPLING=AVERAGE -co OVERVIEWS=IGNORE_EXISTING -ot Int16 -a_nodata -9999 $dens_temp $dens\n", + "\n", + "\n", + " if os.path.exists(dens_temp):\n", + " os.remove(dens_temp)\n", + " if os.path.exists('fin.geojson'):\n", + " os.remove('fin.geojson')\n", + " if os.path.exists('vvv.geojson'):\n", + " os.remove('vvv.geojson') \n", + " print(\" ok\")\n", + "\n", + "\n", + "print(\"ok\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "57d04b87", + "metadata": { + "papermill": { + "duration": null, + "end_time": null, + "exception": null, + "start_time": null, + "status": "pending" + }, + "tags": [] + }, + "outputs": [], + "source": [ + 
"shutil.make_archive(\"output\",'zip',\"out\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c3af2d12", + "metadata": { + "papermill": { + "duration": null, + "end_time": null, + "exception": null, + "start_time": null, + "status": "pending" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "shutil.make_archive(\"grid1k\",'zip',\"grid1k\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "864604ac", + "metadata": { + "papermill": { + "duration": null, + "end_time": null, + "exception": null, + "start_time": null, + "status": "pending" + }, + "tags": [] + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python [conda env:edc-default-2022.10-14]", + "language": "python", + "name": "conda-env-edc-default-2022.10-14-py" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + }, + "papermill": { + "default_parameters": {}, + "duration": 5.819728, + "end_time": "2023-01-31T18:06:13.298429", + "environment_variables": {}, + "exception": true, + "input_path": "/tmp/tmphitp2png", + "output_path": "/tmp/notebook_output.ipynb", + "parameters": {}, + "start_time": "2023-01-31T18:06:07.478701", + "version": "2.3.4" + }, + "properties": { + "authors": [ + { + "id": "d8a2b943-31ef-402a-a1c9-315163cc36f1", + "name": "ryy83337@xcoxc.com" + } + ], + "description": "Vessel Density based on Copernicus Sentinel-1", + "id": "9dc6586e-9aea-485e-acac-755f4eb5ef17", + "license": "Commercial", + "license_notes": null, + "license_price": "5.00", + "name": "Vessel Density based on Copernicus Sentinel-1", + "requirements": [ + "eurodatacube", + "eurodatacube-xcube-gen" + ], + "tags": [ + "EO Data", + "Download Service", + "Jupyter", + "Sentinel Hub", + "Vector Data", + "xcube" + ], + 
"tosAgree": true, + "type": "Jupyter Notebook", + "version": "0.0.1" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} \ No newline at end of file From 2b77beabd02001c746b57e1cd769e2e79d59b9d6 Mon Sep 17 00:00:00 2001 From: eurodatacube-submissions <61697821+eurodatacube-submissions@users.noreply.github.com> Date: Tue, 31 Jan 2023 20:15:35 +0100 Subject: [PATCH 3/3] Add executed notebook under notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb [skip ci] --- .../Vessel_Density_from_Sentinel-1.ipynb | 88 +++++++++---------- 1 file changed, 44 insertions(+), 44 deletions(-) diff --git a/notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb b/notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb index eb87e191..da1ff565 100644 --- a/notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb +++ b/notebooks/contributions/Vessel_Density_from_Sentinel-1.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "53f2e93e", + "id": "32bdafe2", "metadata": { "tags": [ "papermill-error-cell-tag" @@ -17,10 +17,10 @@ "id": "fd59ebb0", "metadata": { "papermill": { - "duration": 0.005329, - "end_time": "2023-01-31T18:06:08.716096", + "duration": 0.004526, + "end_time": "2023-01-31T19:15:29.601478", "exception": false, - "start_time": "2023-01-31T18:06:08.710767", + "start_time": "2023-01-31T19:15:29.596952", "status": "completed" }, "tags": [] @@ -45,16 +45,16 @@ "id": "269da3e9", "metadata": { "execution": { - "iopub.execute_input": "2023-01-31T18:06:08.726356Z", - "iopub.status.busy": "2023-01-31T18:06:08.725705Z", - "iopub.status.idle": "2023-01-31T18:06:08.735822Z", - "shell.execute_reply": "2023-01-31T18:06:08.735015Z" + "iopub.execute_input": "2023-01-31T19:15:29.609997Z", + "iopub.status.busy": "2023-01-31T19:15:29.609339Z", + "iopub.status.idle": "2023-01-31T19:15:29.618067Z", + "shell.execute_reply": "2023-01-31T19:15:29.617302Z" }, "papermill": { - "duration": 0.017853, - "end_time": "2023-01-31T18:06:08.737704", + "duration": 
0.014794, + "end_time": "2023-01-31T19:15:29.620017", "exception": false, - "start_time": "2023-01-31T18:06:08.719851", + "start_time": "2023-01-31T19:15:29.605223", "status": "completed" }, "tags": [] @@ -71,16 +71,16 @@ "id": "7b1a4422", "metadata": { "execution": { - "iopub.execute_input": "2023-01-31T18:06:08.746698Z", - "iopub.status.busy": "2023-01-31T18:06:08.746274Z", - "iopub.status.idle": "2023-01-31T18:06:08.750335Z", - "shell.execute_reply": "2023-01-31T18:06:08.749597Z" + "iopub.execute_input": "2023-01-31T19:15:29.628053Z", + "iopub.status.busy": "2023-01-31T19:15:29.627232Z", + "iopub.status.idle": "2023-01-31T19:15:29.630878Z", + "shell.execute_reply": "2023-01-31T19:15:29.630241Z" }, "papermill": { - "duration": 0.01053, - "end_time": "2023-01-31T18:06:08.752056", + "duration": 0.009528, + "end_time": "2023-01-31T19:15:29.632458", "exception": false, - "start_time": "2023-01-31T18:06:08.741526", + "start_time": "2023-01-31T19:15:29.622930", "status": "completed" }, "tags": [] @@ -97,16 +97,16 @@ "id": "4b5a4898", "metadata": { "execution": { - "iopub.execute_input": "2023-01-31T18:06:08.760844Z", - "iopub.status.busy": "2023-01-31T18:06:08.760513Z", - "iopub.status.idle": "2023-01-31T18:06:08.764351Z", - "shell.execute_reply": "2023-01-31T18:06:08.763575Z" + "iopub.execute_input": "2023-01-31T19:15:29.640369Z", + "iopub.status.busy": "2023-01-31T19:15:29.639775Z", + "iopub.status.idle": "2023-01-31T19:15:29.643502Z", + "shell.execute_reply": "2023-01-31T19:15:29.642836Z" }, "papermill": { - "duration": 0.01016, - "end_time": "2023-01-31T18:06:08.766172", + "duration": 0.009586, + "end_time": "2023-01-31T19:15:29.645133", "exception": false, - "start_time": "2023-01-31T18:06:08.756012", + "start_time": "2023-01-31T19:15:29.635547", "status": "completed" }, "tags": [] @@ -123,16 +123,16 @@ "id": "74013f50", "metadata": { "execution": { - "iopub.execute_input": "2023-01-31T18:06:08.776143Z", - "iopub.status.busy": "2023-01-31T18:06:08.775732Z", - 
"iopub.status.idle": "2023-01-31T18:06:11.435040Z", - "shell.execute_reply": "2023-01-31T18:06:11.434107Z" + "iopub.execute_input": "2023-01-31T19:15:29.653012Z", + "iopub.status.busy": "2023-01-31T19:15:29.652114Z", + "iopub.status.idle": "2023-01-31T19:15:32.248482Z", + "shell.execute_reply": "2023-01-31T19:15:32.247593Z" }, "papermill": { - "duration": 2.667054, - "end_time": "2023-01-31T18:06:11.437338", + "duration": 2.602875, + "end_time": "2023-01-31T19:15:32.250652", "exception": false, - "start_time": "2023-01-31T18:06:08.770284", + "start_time": "2023-01-31T19:15:29.647777", "status": "completed" }, "tags": [] @@ -179,7 +179,7 @@ }, { "cell_type": "markdown", - "id": "f06f2c8a", + "id": "411a5f3c", "metadata": { "tags": [ "papermill-error-cell-tag" @@ -195,16 +195,16 @@ "id": "05f95f2c", "metadata": { "execution": { - "iopub.execute_input": "2023-01-31T18:06:11.446566Z", - "iopub.status.busy": "2023-01-31T18:06:11.445794Z", - "iopub.status.idle": "2023-01-31T18:06:12.669198Z", - "shell.execute_reply": "2023-01-31T18:06:12.667597Z" + "iopub.execute_input": "2023-01-31T19:15:32.258779Z", + "iopub.status.busy": "2023-01-31T19:15:32.257905Z", + "iopub.status.idle": "2023-01-31T19:15:33.148608Z", + "shell.execute_reply": "2023-01-31T19:15:33.147383Z" }, "papermill": { - "duration": 1.229908, - "end_time": "2023-01-31T18:06:12.671031", + "duration": 0.896904, + "end_time": "2023-01-31T19:15:33.150613", "exception": true, - "start_time": "2023-01-31T18:06:11.441123", + "start_time": "2023-01-31T19:15:32.253709", "status": "failed" }, "tags": [] @@ -828,14 +828,14 @@ }, "papermill": { "default_parameters": {}, - "duration": 5.819728, - "end_time": "2023-01-31T18:06:13.298429", + "duration": 5.539753, + "end_time": "2023-01-31T19:15:34.073256", "environment_variables": {}, "exception": true, - "input_path": "/tmp/tmphitp2png", + "input_path": "/tmp/tmpimosmukp", "output_path": "/tmp/notebook_output.ipynb", "parameters": {}, - "start_time": 
"2023-01-31T18:06:07.478701", + "start_time": "2023-01-31T19:15:28.533503", "version": "2.3.4" }, "properties": {