Hi William,
Thanks for the reply! I’ve actually rerun it this morning with a few edits and it works…
Sorry, I should have put an explainer as the function is a mess!
The idea is to take a large area, e.g., North Africa, break it up into smaller tiles, and then, for each tile, check whether both ascending and descending Sentinel-1 GRD tracks are available, collecting the metadata from all products that meet the search criteria in SentinelHubCatalog().search(). This is done at tile level because the areas are large enough that they have only partial coverage of each track. This is the isTractAvailable() method and this part of the code:
for collection in [DataCollection.SENTINEL1_IW_ASC, DataCollection.SENTINEL1_IW_DES]:
available_tiles[collection] = []
for tile_bbox in bbox_list:
available, search_results = isTractAvailable(collection, tile_bbox, start_date, end_date)
if available:
available_tiles[collection].append(tile_bbox)
getSubArea() builds the relevant requests for the available S1 GRD tiles, selecting the most recent pass where there are multiple S1 GRD passes over the same tile within the time range.
sh_requests = []
for collection, available_tile_bboxes in available_tiles.items():
mosaic_order = MosaickingOrder.MOST_RECENT if collection == DataCollection.SENTINEL1_IW_DES else MosaickingOrder.MOST_RECENT
for tile_bbox in available_tile_bboxes:
available, search_results = isTractAvailable(collection, tile_bbox, start_date, end_date)
if available:
search_results_list.append(search_results)
request = getSubArea(tile_bbox, collection, mosaic_order)
sh_requests.append(request)
Where it gets a bit messy is in repeatedly calling the isTractAvailable() method. This is because I really struggled to get detailed metadata from the evalscript and SentinelHubRequest by following the guidance here. So, I used SentinelHubCatalog().search() and some indexing to ensure I only wrote the metadata for downloaded tiles to .json files. It’s messy, but it now works. Here is the now-working code, if anyone else is interested in similar things:
def runSARdownload_metadata(boundingbox, area, resolution=(30, 30),
                            start_date="2018-07-01", end_date="2018-08-30",
                            crs=CRS.WGS84, config=None,
                            SENT1_OUTPUT_FOLDER="output"):
    """Download Sentinel-1 GRD (VV) tiles for an area and save per-tile metadata.

    The area's bounding box is split into 50 km UTM tiles. For each tile the
    Sentinel Hub catalog is queried once per orbit direction (ascending and
    descending); tiles with coverage are downloaded (most recent pass when
    several passes overlap the time range), and a ``detailed_metadata.json``
    describing the matching catalog product is written into each tile's
    output folder.

    Parameters
    ----------
    boundingbox : tuple
        (min_lon, min_lat, max_lon, max_lat) in ``crs``.
    area : str
        Name of the per-area sub-folder created under ``SENT1_OUTPUT_FOLDER``.
    resolution : tuple, optional
        (x, y) output resolution passed to ``SentinelHubRequest``.
    start_date, end_date : str, optional
        ISO date strings bounding the search/download interval.
    crs : CRS, optional
        Coordinate reference system of ``boundingbox``.
    config : SHConfig, optional
        Sentinel Hub configuration carrying the API credentials.
    SENT1_OUTPUT_FOLDER : str, optional
        Root folder for all downloads.
    """
    print(f"Running SAR downloader for area: {area}")

    bbox = box(boundingbox[0], boundingbox[1], boundingbox[2], boundingbox[3])
    # 'assert' is stripped under python -O, so validate with an explicit raise.
    if not isinstance(bbox, Polygon):
        raise TypeError("boundingbox did not produce a shapely Polygon")
    tile_splits = UtmZoneSplitter([bbox], crs, (50000, 50000))

    export_folder = os.path.join(SENT1_OUTPUT_FOLDER, area)
    os.makedirs(export_folder, exist_ok=True)

    evalscript = """
    //VERSION=3
    function setup() {
        return {input: ["VV"],
                output: { id:"default", bands: 1, sampleType: SampleType.FLOAT32}}}
    function evaluatePixel(samples) {return [samples.VV]}
    """

    def isTractAvailable(collection, tile_bbox, start_date, end_date):
        """Query the catalog once; return (available, results_as_list).

        The results are materialised into a list so callers can reuse them
        (the original code re-queried the catalog for every tile and handed
        a one-shot iterator to the metadata writer).
        """
        try:
            search_results = list(SentinelHubCatalog().search(
                collection=collection,
                bbox=tile_bbox,
                time=(start_date, end_date),
                limit=None))
        except Exception as e:
            # Best effort: a failed search simply marks the tile unavailable.
            print(f"Error checking data availability: {e}")
            return False, []
        return bool(search_results), search_results

    def extract_required_metadata(search_results):
        """Pick the fields of interest from the first catalog hit."""
        first = search_results[0]
        return {
            'id': first['id'],
            'date_time': first['properties']['datetime'],
            'orbit_state': first['properties']['sat:orbit_state'],
            'platform': first['properties']['platform'],
            'resolution_azimuth': first['properties']['sar:resolution_azimuth'],
        }

    def store_metadata(search_results, directory):
        """Write the tile's metadata to <directory>/detailed_metadata.json."""
        metadata_file = os.path.join(directory, "detailed_metadata.json")
        with open(metadata_file, 'w') as f:
            json.dump(extract_required_metadata(search_results), f)

    def tile_folder_for(collection, tile_bbox):
        """Return (and create) the output folder for one tile/orbit pair.

        Shared by the request builder and the metadata writer so the two
        can never disagree about the folder name.
        """
        orbit_direction = 'ASC' if collection == DataCollection.SENTINEL1_IW_ASC else 'DES'
        # NOTE(review): truncating the coordinate strings to 3 characters can
        # collide for nearby tiles and for negative lon/lat — confirm tiles
        # are spaced widely enough for this naming scheme.
        coordinate = str(tile_bbox.lower_left[0])[:3] + str(tile_bbox.lower_left[1])[:3]
        folder = os.path.join(export_folder, f"{orbit_direction}_{coordinate}")
        os.makedirs(folder, exist_ok=True)
        return folder

    def getSubArea(bbox, collection, mosaic_order):
        """Build the SentinelHubRequest for one tile."""
        # Bug fix: the original derived the folder from the enclosing loop
        # variable 'tile_bbox' instead of this function's 'bbox' parameter;
        # it only worked because the call site happened to use that name.
        return SentinelHubRequest(
            evalscript=evalscript,
            input_data=[
                SentinelHubRequest.input_data(
                    data_collection=collection,
                    time_interval=(start_date, end_date),
                    mosaicking_order=mosaic_order,
                    other_args={
                        "processing": {
                            "backCoeff": "SIGMA0_ELLIPSOID",
                        }})],
            responses=[
                SentinelHubRequest.output_response("default", MimeType.TIFF),
            ],
            bbox=bbox,
            resolution=resolution,
            data_folder=tile_folder_for(collection, bbox),
            config=config,)

    # One catalog search per (collection, tile); the results are kept so the
    # download and metadata stages never have to query the catalog again.
    bbox_list = tile_splits.get_bbox_list()
    available_tiles = {}  # collection -> list of (tile_bbox, search_results)
    for collection in (DataCollection.SENTINEL1_IW_ASC, DataCollection.SENTINEL1_IW_DES):
        available_tiles[collection] = []
        for tile_bbox in bbox_list:
            available, search_results = isTractAvailable(collection, tile_bbox, start_date, end_date)
            if available:
                available_tiles[collection].append((tile_bbox, search_results))

    # MOST_RECENT for both orbit directions: the original conditional
    # selected the same value on both branches, so it was dead code.
    mosaic_order = MosaickingOrder.MOST_RECENT

    sh_requests = []
    tile_records = []  # aligned with sh_requests: (collection, tile_bbox, search_results)
    for collection, tiles in available_tiles.items():
        for tile_bbox, search_results in tiles:
            sh_requests.append(getSubArea(tile_bbox, collection, mosaic_order))
            tile_records.append((collection, tile_bbox, search_results))

    if sh_requests:
        dl_requests = [request.download_list[0] for request in sh_requests]
        SentinelHubDownloadClient(config=config).download(dl_requests, max_threads=10)
    else:
        print("No download requests were generated. Check the tile availability and other parameters.")

    # Write metadata next to every downloaded tile. Bug fix: the original
    # indexed the stored search results by the *collection* index (0 or 1),
    # so every tile in a collection received the same — usually wrong —
    # metadata whenever more than one tile was available.
    for collection, tile_bbox, search_results in tile_records:
        store_metadata(search_results, tile_folder_for(collection, tile_bbox))
if __name__ == "__main__":
    # Set up the Sentinel Hub API credentials.
    config = SHConfig()
    # Fill in real credentials before running. Quoted placeholders keep the
    # file syntactically valid (the original had bare '###' on the right-hand
    # side of each assignment, which is a syntax error).
    config.sh_client_id = "<your-client-id>"
    config.sh_client_secret = "<your-client-secret>"
    config.instance_id = "<your-instance-id>"
    config.save()

    ROOT = os.path.abspath('data/')
    SENT1_OUTPUT_FOLDER = os.path.join(ROOT, 'S1GRD_metadata/')
    os.makedirs(SENT1_OUTPUT_FOLDER, exist_ok=True)

    bbox = (-118.26, 36.88, -117.76, 37.38)  # small Death Valley test area
    area = 'DV_Test'
    runSARdownload_metadata(bbox, area, config=config, SENT1_OUTPUT_FOLDER=SENT1_OUTPUT_FOLDER)
Also, thanks for the information on the Copernicus Data Space Ecosystem. Do you know a timeframe for this? And what does this mean for sentinel-hub-py?