Issue with downloading S1GRD data via Processing API and sentinel-hub-py?

Hi All,

I have no idea why my code isn’t working (apologies it’s a bit ugly). It was working yesterday, I added some segments to get generate a detailed_metadata.json file for each downloaded tile so I can get some additional data as well as the image through the sentinelsat api.

def runSARdownload_metadata(boundingbox, area, resolution=(30, 30),
                            start_date="2018-07-01", end_date="2018-08-30", 
                            crs=CRS.WGS84, config=None, 
                            SENT1_OUTPUT_FOLDER="output"):
    
    print(f"Running SAR downloader for area: {area}")
    bbox = box(boundingbox[0], boundingbox[1], boundingbox[2], boundingbox[3])
    assert isinstance(bbox, Polygon)

    tile_splits = UtmZoneSplitter([bbox], crs, (50000, 50000))
    export_folder = os.path.join(SENT1_OUTPUT_FOLDER, area)
    
    if not os.path.exists(export_folder):
        os.makedirs(export_folder)

    evalscript = """
                //VERSION=3
                function setup() {
                return {input: ["VV"],
                        output: { id:"default", bands: 1, sampleType: SampleType.FLOAT32}}}
                function evaluatePixel(samples) {return [samples.VV]}
                """
    
    def isTractAvailable(collection, tile_bbox, start_date, end_date):
        available = False
        search_results = []
        try:
            search_results = SentinelHubCatalog().search(
                                                        collection=collection,
                                                        bbox=tile_bbox,
                                                        time=(start_date, end_date),
                                                        limit=None)
            if search_results:
                available = True
        except Exception as e:
            print(f"Error checking data availability: {e}")
            available = False
        return available, search_results

    def extract_required_metadata(search_results):
        search_results_list = list(search_results)[0]
        metadata = {
                    'id': search_results_list['id'],
                    'date_time': search_results_list['properties']['datetime'],
                    'orbit_state': search_results_list['properties']['sat:orbit_state'],
                    'platform': search_results_list['properties']['platform'],
                    'resolution_azimuth': search_results_list['properties']['sar:resolution_azimuth']
                    }
        return metadata
    
    def store_metadata(search_results, directory):
        metadata = extract_required_metadata(search_results)
        metadata_file = os.path.join(directory, "detailed_metadata.json")
        with open(metadata_file, 'w') as f:
            json.dump(metadata, f)

    def getSubArea(bbox, collection, mosaic_order):
        return SentinelHubRequest(
            evalscript=evalscript,
            input_data=[
                SentinelHubRequest.input_data(
                    data_collection=collection,
                    time_interval=(start_date, end_date),
                    mosaicking_order=mosaic_order,
                    other_args={
                            "processing": {
                                        "backCoeff": "SIGMA0_ELLIPSOID",
                                        }})],
            responses=[
                SentinelHubRequest.output_response("default", MimeType.TIFF),
            ],
            bbox=bbox,
            resolution=resolution,
            data_folder=export_folder,
            config=config,)

    bbox_list = tile_splits.get_bbox_list()
    available_tiles = {}
    search_results_list = []

    for collection in [DataCollection.SENTINEL1_IW_ASC, DataCollection.SENTINEL1_IW_DES]:
        available_tiles[collection] = []
        for tile_bbox in bbox_list:
            available, search_results = isTractAvailable(collection, tile_bbox, start_date, end_date)
            if available:
                available_tiles[collection].append(tile_bbox)

    sh_requests = []
    for collection, available_tile_bboxes in available_tiles.items():
        mosaic_order = MosaickingOrder.MOST_RECENT if collection == DataCollection.SENTINEL1_IW_DES else MosaickingOrder.MOST_RECENT

        for tile_bbox in available_tile_bboxes:
            available, search_results = isTractAvailable(collection, tile_bbox, start_date, end_date)
            if available:
                search_results_list.append(search_results)
                request = getSubArea(tile_bbox, collection, mosaic_order)
                sh_requests.append(request)
    
    
    if sh_requests: 
        dl_requests = [request.download_list[0] for request in sh_requests]
        pdb.set_trace()
        downloaded_data = SentinelHubDownloadClient(config=config).download(dl_requests, max_threads=5)
    else:
        print("No download requests were generated. Check the tile availability and other parameters.")

    for idx, (collection, available_tile_bboxes) in enumerate(available_tiles.items()):
        for tile_bbox in available_tile_bboxes:
            store_metadata(search_results_list[idx], export_folder)



if __name__ == "__main__":


    # Set up sentinel-hub api
    config = SHConfig()
    config.sh_client_id = ###
    config.sh_client_secret = ###
    config.instance_id = ###
    config.save()
    

    ROOT = 'data/' 
    SENT1_OUTPUT_FOLDER = ROOT + 'S1GRD_metadata/'
    bbox = (-118.26, 36.88, -117.76, 37.38)
    area = 'DV_Test'
    test = SENT1_OUTPUT_FOLDER+area

    runSARdownload_metadata(bbox,area,config=config)

I am worried I am being really stupid but I put a pdb statement in to debug and there is dl_requests etc. but they just do not download. It doesn’t download anything at all. This means the metadata isn’t saved either (it relies on the export folder being created from the download).

Running SAR downloader for area: DV_Test
> /Users/###/Documents/projects/###/src/dataDownloaders.py(215)runSARdownload_metadata()
-> downloaded_data = SentinelHubDownloadClient(config=config).download(dl_requests, max_threads=5)
(Pdb) dl_requests[0]
DownloadRequest(url='https://services.sentinel-hub.com/api/v1/process', headers={'content-type': 'application/json', 'accept': 'image/tiff'}, request_type=<RequestType.POST: 'POST'>, post_values={'input': {'bounds': {'properties': {'crs': 'http://www.opengis.net/def/crs/EPSG/0/32611'}, 'bbox': [350000.0, 4050000.0, 400000.0, 4100000.0]}, 'data': [InputDataDict({'type': 'sentinel-1-grd', 'dataFilter': {'timeRange': {'from': '2018-07-01T00:00:00Z', 'to': '2018-08-30T23:59:59Z'}, 'mosaickingOrder': 'mostRecent', 'acquisitionMode': 'IW', 'polarization': 'DV', 'resolution': 'HIGH', 'orbitDirection': 'ASCENDING'}, 'processing': {'backCoeff': 'SIGMA0_ELLIPSOID'}}, service_url=https://services.sentinel-hub.com)]}, 'evalscript': '\n                //VERSION=3\n                function setup() {\n                return {input: ["VV"],\n                        output: { id:"default", bands: 1, sampleType: SampleType.FLOAT32}}}\n                function evaluatePixel(samples) {return [samples.VV]}\n                ', 'output': {'responses': [{'identifier': 'default', 'format': {'type': 'image/tiff'}}], 'resx': 30, 'resy': 30}}, use_session=True, data_type=<MimeType.TIFF: 'tiff'>, save_response=True, data_folder='output/DV_Test', filename=None, return_data=True, extra_params={})
(Pdb) len(dl_requests)
8
(Pdb) sh_requests
[<sentinelhub.api.process.SentinelHubRequest object at 0x168d2d540>, <sentinelhub.api.process.SentinelHubRequest object at 0x168d2d900>, <sentinelhub.api.process.SentinelHubRequest object at 0x168d2da80>, <sentinelhub.api.process.SentinelHubRequest object at 0x168d2dc00>, <sentinelhub.api.process.SentinelHubRequest object at 0x168d2dd80>, <sentinelhub.api.process.SentinelHubRequest object at 0x168d2df00>, <sentinelhub.api.process.SentinelHubRequest object at 0x168d2e080>, <sentinelhub.api.process.SentinelHubRequest object at 0x168d2e200>]
(Pdb) sh_requests[0]
<sentinelhub.api.process.SentinelHubRequest object at 0x168d2d540>
(Pdb) len(sh_requests) 
8

I have over plenty of processing units, requests, and have paid for account. Is there something I am doing that’s causing this? Any help would be awesome!!

Thanks!!

Hi William,

I’m not entirely sure what your script and functions are doing, but the first thing I would recommend trying is testing your Process API request in isolation. If it works, then you can find out if it something wrong with the Sentinel Hub Request before proceeding further. Let me know how you get on :slight_smile:

In addition, if you are using sentinelsat that obtains data from the Copernicus SciHub service, this is something that I don’t have experience using as it is outside the Sentinel Hub APIs. At this moment I think it is important to tell you too that the Copernicus Scihub is being replaced by the Copernicus Data Space Ecosystem in the near future, so it maybe better to build your functions around this service instead.

Hi William,

Thanks for the reply! I’ve actually rerun it this morning with a few edits and it works…

Sorry, I should have put an explainer as the function is a mess!

The idea is to take a large area, e.g., North Africa, break it up into smaller tiles, then for each tile, check whether both ascending and descending sentinel 1 GRD tracts and collect the metadata from all products that meet the search criteria in the SentinelHubCatalog().search. This is done at tile level as the areas are large enough that they have partial coverage of both tracts etc. This is the isTractAvailable() method and this part of the code:

    for collection in [DataCollection.SENTINEL1_IW_ASC, DataCollection.SENTINEL1_IW_DES]:
        available_tiles[collection] = []
        for tile_bbox in bbox_list:
            available, search_results = isTractAvailable(collection, tile_bbox, start_date, end_date)
            if available:
                available_tiles[collection].append(tile_bbox)

getSubArea() gets the relevant requests for available S1GRD tiles etc., selecting the most recent where there is multiple S1GRD passes for the same tile over the time range.

    sh_requests = []
    for collection, available_tile_bboxes in available_tiles.items():
        mosaic_order = MosaickingOrder.MOST_RECENT if collection == DataCollection.SENTINEL1_IW_DES else MosaickingOrder.MOST_RECENT

        for tile_bbox in available_tile_bboxes:
            available, search_results = isTractAvailable(collection, tile_bbox, start_date, end_date)
            if available:
                search_results_list.append(search_results)
                request = getSubArea(tile_bbox, collection, mosaic_order)
                sh_requests.append(request)

Where is gets a bit messy is repeatedly calling the isTractAvailable method. This is as I really struggled to get detailed metadata from the evalscript and SentinelHubRequest by following the guidance here. So, I used the SentinelHubCatalog().search() and some indexing to ensure I only wrote the metadata for downloaded tiles to .json files. It’s messy but it now works. Here, is the now working code if anyone else is interested in similar things:

def runSARdownload_metadata(boundingbox, area, resolution=(30, 30),
                            start_date="2018-07-01", end_date="2018-08-30", 
                            crs=CRS.WGS84, config=None, 
                            SENT1_OUTPUT_FOLDER="output"):
    
    print(f"Running SAR downloader for area: {area}")
    bbox = box(boundingbox[0], boundingbox[1], boundingbox[2], boundingbox[3])
    assert isinstance(bbox, Polygon)

    tile_splits = UtmZoneSplitter([bbox], crs, (50000, 50000))
    export_folder = os.path.join(SENT1_OUTPUT_FOLDER, area)
    
    if not os.path.exists(export_folder):
        os.makedirs(export_folder)

    evalscript = """
                //VERSION=3
                function setup() {
                return {input: ["VV"],
                        output: { id:"default", bands: 1, sampleType: SampleType.FLOAT32}}}
                function evaluatePixel(samples) {return [samples.VV]}
                """
    
    def isTractAvailable(collection, tile_bbox, start_date, end_date):
        available = False
        search_results = []
        try:
            search_results = SentinelHubCatalog().search(
                                                        collection=collection,
                                                        bbox=tile_bbox,
                                                        time=(start_date, end_date),
                                                        limit=None)
            if search_results:
                available = True
        except Exception as e:
            print(f"Error checking data availability: {e}")
            available = False
        return available, search_results

    def extract_required_metadata(search_results):
        search_results_list = list(search_results)[0]
        metadata = {
                    'id': search_results_list['id'],
                    'date_time': search_results_list['properties']['datetime'],
                    'orbit_state': search_results_list['properties']['sat:orbit_state'],
                    'platform': search_results_list['properties']['platform'],
                    'resolution_azimuth': search_results_list['properties']['sar:resolution_azimuth']
                    }
        return metadata
    
    def store_metadata(search_results, directory):
        metadata = extract_required_metadata(search_results)
        metadata_file = os.path.join(directory, "detailed_metadata.json")
        with open(metadata_file, 'w') as f:
            json.dump(metadata, f)

    def getSubArea(bbox, collection, mosaic_order):
        orbit_direction = 'ASC' if collection == DataCollection.SENTINEL1_IW_ASC else 'DES'
        coordinate = str(tile_bbox.lower_left[0])[:3]+str(tile_bbox.lower_left[1])[:3]
        tile_folder_name = f"{orbit_direction}_{coordinate}"
        tile_folder = os.path.join(export_folder, tile_folder_name)
        if not os.path.exists(tile_folder):
            os.makedirs(tile_folder)
        return SentinelHubRequest(
            evalscript=evalscript,
            input_data=[
                SentinelHubRequest.input_data(
                    data_collection=collection,
                    time_interval=(start_date, end_date),
                    mosaicking_order=mosaic_order,
                    other_args={
                            "processing": {
                                        "backCoeff": "SIGMA0_ELLIPSOID",
                                        }})],
            responses=[
                SentinelHubRequest.output_response("default", MimeType.TIFF),
            ],
            bbox=bbox,
            resolution=resolution,
            data_folder=tile_folder,
            config=config,)

    bbox_list = tile_splits.get_bbox_list()
    available_tiles = {}
    search_results_list = []

    for collection in [DataCollection.SENTINEL1_IW_ASC, DataCollection.SENTINEL1_IW_DES]:
        available_tiles[collection] = []
        for tile_bbox in bbox_list:
            available, search_results = isTractAvailable(collection, tile_bbox, start_date, end_date)
            if available:
                available_tiles[collection].append(tile_bbox)

    sh_requests = []
    for collection, available_tile_bboxes in available_tiles.items():
        mosaic_order = MosaickingOrder.MOST_RECENT if collection == DataCollection.SENTINEL1_IW_DES else MosaickingOrder.MOST_RECENT

        for tile_bbox in available_tile_bboxes:
            available, search_results = isTractAvailable(collection, tile_bbox, start_date, end_date)
            if available:
                search_results_list.append(search_results)
                request = getSubArea(tile_bbox, collection, mosaic_order)
                sh_requests.append(request)
    
    
    if sh_requests: 
        dl_requests = [request.download_list[0] for request in sh_requests]
        downloaded_data = SentinelHubDownloadClient(config=config).download(dl_requests, max_threads=10)
    else:
        print("No download requests were generated. Check the tile availability and other parameters.")

    for idx, (collection, available_tile_bboxes) in enumerate(available_tiles.items()):
        for tile_bbox in available_tile_bboxes:
            orbit_direction = 'ASC' if collection == DataCollection.SENTINEL1_IW_ASC else 'DES'
            coordinate = str(tile_bbox.lower_left[0])[:3]+str(tile_bbox.lower_left[1])[:3]
            tile_folder_name = f"{orbit_direction}_{coordinate}"
            tile_folder = os.path.join(export_folder, tile_folder_name)
            store_metadata(search_results_list[idx], tile_folder)


if __name__ == "__main__":


    # Set up sentinel-hub api
    config = SHConfig()
    config.sh_client_id = ###
    config.sh_client_secret = ###
    config.instance_id = ###
    config.save()
    

    ROOT = os.path.abspath('data/') 
    SENT1_OUTPUT_FOLDER = os.path.join(ROOT, 'S1GRD_metadata/')
    os.makedirs(SENT1_OUTPUT_FOLDER, exist_ok=True)
    bbox = (-118.26, 36.88, -117.76, 37.38)
    area = 'DV_Test'
    test = os.path.join(SENT1_OUTPUT_FOLDER, area)

    runSARdownload_metadata(bbox, area, config=config, SENT1_OUTPUT_FOLDER=SENT1_OUTPUT_FOLDER)

Also, thanks for the on the Copernicus Data Space Ecosystem. Do you know a timeframe for this? And what does this mean sentinel-hub-py?

Hi William, the timeframes are not set in stone but there is some more information about this here. In addition, access to Copernicus mission data via Sentinel Hub API is also possible with the Copernicus Data Space Ecosystem. This means you can use sentinel-hub-py to achieve this. I’d recommend going through this notebook authored by myself to see how you can do this.

I was trying your code in ice_monitoring.ipynb but don’t work for me, the return data is always a nxmxb with zeros.

b=number of bands (VV,VH,datamask)

Hello guys, I’m also trying to download the sentinel1 GRD products and I was only able to do it through Terrascope OpenEO API but it’s limited to 200x200 km, and the issue is that the polygon should be aligned with the latitude and longitude axes, for me my goal is to download the sentinel1 GRD in tiles as they are in the Copernicus browser 250x170km and not aligned with latitude and longitude axes.