Skip to content

Commit

Permalink
[pre-commit.ci] auto fixes from pre-commit.com hooks
Browse files Browse the repository at this point in the history
for more information, see https://pre-commit.ci
  • Loading branch information
pre-commit-ci[bot] committed Nov 14, 2023
1 parent fd23e2c commit 0aa9fab
Showing 1 changed file with 39 additions and 26 deletions.
65 changes: 39 additions & 26 deletions datacube.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,20 +22,14 @@

import random
from datetime import datetime, timedelta

import geopandas as gpd
import pystac
import pystac_client
import numpy as np
import planetary_computer as pc
import pystac_client
import stackstac
import xarray as xr
import numpy as np
from shapely.geometry import box, shape, Point
import leafmap
from rasterio.errors import RasterioIOError
from rasterio.warp import transform_bounds
from getpass import getpass
import os
import json
from shapely.geometry import box

STAC_API = "https://planetarycomputer.microsoft.com/api/stac/v1"
S2_BANDS = ["B02", "B03", "B04", "B05", "B06", "B07", "B08", "B8A", "B11", "B12", "SCL"]
Expand Down Expand Up @@ -75,8 +69,8 @@ def get_week(year, month, day):
date = datetime(year, month, day)
start_of_week = date - timedelta(days=date.weekday())
end_of_week = start_of_week + timedelta(days=6)
start_date_str = start_of_week.strftime('%Y-%m-%d')
end_date_str = end_of_week.strftime('%Y-%m-%d')
start_date_str = start_of_week.strftime("%Y-%m-%d")
end_date_str = end_of_week.strftime("%Y-%m-%d")
return f"{start_date_str}/{end_date_str}"


Expand Down Expand Up @@ -133,8 +127,17 @@ def search_sentinel2(week, aoi, cloud_cover_percentage, nodata_pixel_percentage)
},
{"op": "anyinteracts", "args": [{"property": "datetime"}, week]},
{"op": "=", "args": [{"property": "collection"}, "sentinel-2-l2a"]},
{"op": "<=", "args": [{"property": "eo:cloud_cover"}, cloud_cover_percentage]},
{"op": "<=", "args": [{"property": "s2:nodata_pixel_percentage"}, nodata_pixel_percentage]},
{
"op": "<=",
"args": [{"property": "eo:cloud_cover"}, cloud_cover_percentage],
},
{
"op": "<=",
"args": [
{"property": "s2:nodata_pixel_percentage"},
nodata_pixel_percentage,
],
},
],
},
)
Expand Down Expand Up @@ -247,9 +250,7 @@ def search_dem(BBOX, catalog, week, s2_items):
Returns:
- tuple: A tuple containing DEM items and DEM GeoDataFrame.
"""
search = catalog.search(
collections=["cop-dem-glo-30"], bbox=BBOX
)
search = catalog.search(collections=["cop-dem-glo-30"], bbox=BBOX)
dem_items = search.item_collection()
print(f"Found {len(dem_items)} items")

Expand Down Expand Up @@ -287,7 +288,9 @@ def make_dataarrays(s2_items, s1_items, dem_items, BBOX, resolution, epsg):
)

da_sen1: xr.DataArray = stackstac.stack(
items=s1_items[1:], # To only accept the same orbit state and date. Need better way to do this.
items=s1_items[
1:
], # To only accept the same orbit state and date. Need better way to do this.
assets=["vh", "vv"], # SAR polarizations
epsg=epsg,
bounds_latlon=BBOX, # W, S, E, N
Expand Down Expand Up @@ -345,13 +348,17 @@ def merge_datarrays(da_sen2, da_sen1, da_dem):
Returns:
- xr.DataArray: Merged xarray DataArray.
"""
da_merge = xr.merge([da_sen2, da_sen1, da_dem], compat='override')
da_merge = xr.merge([da_sen2, da_sen1, da_dem], compat="override")
print("Merged datarray: ", da_merge)
print("Time variables (S2, merged): ", da_sen2.time.values, da_merge.time.values) # da_sen1.time.values, da_dem.time.values
print(
"Time variables (S2, merged): ", da_sen2.time.values, da_merge.time.values
) # da_sen1.time.values, da_dem.time.values
return da_merge


def process(year1, year2, aoi, resolution, epsg, cloud_cover_percentage, nodata_pixel_percentage):
def process(
year1, year2, aoi, resolution, epsg, cloud_cover_percentage, nodata_pixel_percentage
):
"""
Process Sentinel-2, Sentinel-1, and DEM data for a specified time range, area of interest (AOI),
resolution, EPSG code, cloud cover percentage, and nodata pixel percentage.
Expand All @@ -372,23 +379,29 @@ def process(year1, year2, aoi, resolution, epsg, cloud_cover_percentage, nodata_
date, YEAR, MONTH, DAY, CLOUD = get_conditions(year1, year2)
week = get_week(YEAR, MONTH, DAY)

catalog, s2_items, s2_items_gdf, BBOX = search_sentinel2(week, aoi, cloud_cover_percentage, nodata_pixel_percentage)
catalog, s2_items, s2_items_gdf, BBOX = search_sentinel2(
week, aoi, cloud_cover_percentage, nodata_pixel_percentage
)

s1_items, s1_gdf = search_sentinel1(BBOX, catalog, week)
# s1_items, s1_gdf = search_sentinel1_calc_max_area(BBOX, catalog, week) # WIP


dem_items, dem_gdf = search_dem(BBOX, catalog, week, s2_items)

da_sen2, da_sen1, da_dem = make_dataarrays(s2_items, s1_items, dem_items, BBOX, resolution, epsg)
da_sen2, da_sen1, da_dem = make_dataarrays(
s2_items, s1_items, dem_items, BBOX, resolution, epsg
)

da_merge = merge_datarrays(da_sen2, da_sen1, da_dem)
return da_merge


# EXAMPLE: build a merged Sentinel-2 / Sentinel-1 / DEM datacube for one
# randomly sampled tile in California.
# NOTE(review): this runs at import time; consider wrapping in an
# `if __name__ == "__main__":` guard if the module is ever imported.
california_tile = gpd.read_file("ca.geojson")
sample = california_tile.sample(1)  # pick one random tile to use as the AOI
aoi = sample.iloc[0].geometry  # shapely geometry of the sampled tile
cloud_cover_percentage = 50  # max allowed Sentinel-2 cloud cover (%)
nodata_pixel_percentage = 20  # max allowed Sentinel-2 nodata pixels (%)
# EPSG:26910 is UTM Zone 10N; 10 metres spatial resolution.
process(2017, 2023, aoi, 10, 26910, cloud_cover_percentage, nodata_pixel_percentage)

0 comments on commit 0aa9fab

Please sign in to comment.