My code is failing for certain geographic locations with the error below. Can anyone help me resolve this issue?

My error

2023-03-09 11:39:52,619 INFO Welcome to the CDS
2023-03-09 11:39:52,619 INFO Sending request to https://cds.climate.copernicus.eu/api/v2/resources/reanalysis-era5-single-levels
2023-03-09 11:39:52,832 INFO Request is queued
2023-03-09 11:39:53,996 INFO Request is running
2023-03-09 11:39:55,665 INFO Request is failed
2023-03-09 11:39:55,666 ERROR Message: the request you have submitted is not valid
2023-03-09 11:39:55,666 ERROR Reason:  Exception: Assertion failed: bbox.north() <= Latitude::NORTH_POLE in check, line 34 of /home/cds/git/mars-client/mir/src/mir/util/BoundingBox.cc; MIR: Assertion failed: bbox.north() <= Latitude::NORTH_POLE in check, line 34 of /home/cds/git/mars-client/mir/src/mir/util/BoundingBox.cc; Interpolation failed (-2); Mars server task finished in error; Double buffer error: Assertion failed: length == buffers_[i].length_ in run, line 282 of /home/cds/git/mars-server/eckit/src/eckit/io/DblBuffer.cc(RemoteException from Connector[mvr0010,marsmvr-0010:9701]) [mars]; Error code is -2; Request failed; Some errors reported (last error -2)
2023-03-09 11:39:55,666 ERROR   Traceback (most recent call last):
2023-03-09 11:39:55,666 ERROR     File "/opt/cdstoolbox/cdscompute/cdscompute/cdshandlers/services/handler.py", line 59, in handle_request
2023-03-09 11:39:55,667 ERROR       result = cached(context.method, proc, context, context.args, context.kwargs)
2023-03-09 11:39:55,667 ERROR     File "/opt/cdstoolbox/cdscompute/cdscompute/caching.py", line 108, in cached
2023-03-09 11:39:55,667 ERROR       result = proc(context, *context.args, **context.kwargs)
2023-03-09 11:39:55,667 ERROR     File "/opt/cdstoolbox/cdscompute/cdscompute/services.py", line 124, in __call__
2023-03-09 11:39:55,667 ERROR       return p(*args, **kwargs)
2023-03-09 11:39:55,668 ERROR     File "/opt/cdstoolbox/cdscompute/cdscompute/services.py", line 60, in __call__
2023-03-09 11:39:55,668 ERROR       return self.proc(context, *args, **kwargs)
2023-03-09 11:39:55,668 ERROR     File "/home/cds/cdsservices/services/mars/mars.py", line 48, in internal
2023-03-09 11:39:55,668 ERROR       return mars(context, request, **kwargs)
2023-03-09 11:39:55,668 ERROR     File "/home/cds/cdsservices/services/mars/mars.py", line 20, in mars
2023-03-09 11:39:55,668 ERROR       execute_mars(context, requests, info)
2023-03-09 11:39:55,669 ERROR     File "/home/cds/cdsservices/services/mars/execute_mars.py", line 20, in execute_mars
2023-03-09 11:39:55,669 ERROR       exception=MarsException)
2023-03-09 11:39:55,669 ERROR     File "/opt/cdstoolbox/cdscompute/cdscompute/context.py", line 209, in run_command
2023-03-09 11:39:55,669 ERROR       raise exception(call, proc.returncode, output)
2023-03-09 11:39:55,669 ERROR   home.cds.cdsservices.services.mars.__init__.py.exceptions.MarsException: Exception: Assertion failed: bbox.north() <= Latitude::NORTH_POLE in check, line 34 of /home/cds/git/mars-client/mir/src/mir/util/BoundingBox.cc; MIR: Assertion failed: bbox.north() <= Latitude::NORTH_POLE in check, line 34 of /home/cds/git/mars-client/mir/src/mir/util/BoundingBox.cc; Interpolation failed (-2); Mars server task finished in error; Double buffer error: Assertion failed: length == buffers_[i].length_ in run, line 282 of /home/cds/git/mars-server/eckit/src/eckit/io/DblBuffer.cc(RemoteException from Connector[mvr0010,marsmvr-0010:9701]) [mars]; Error code is -2; Request failed; Some errors reported (last error -2)
Error fetching data for area['35.14439832499524/33.90585241491622/35.14462956749524/33.90608365741622']: the request you have submitted is not valid. Exception: Assertion failed: bbox.north() <= Latitude::NORTH_POLE in check, line 34 of /home/cds/git/mars-client/mir/src/mir/util/BoundingBox.cc; MIR: Assertion failed: bbox.north() <= Latitude::NORTH_POLE in check, line 34 of /home/cds/git/mars-client/mir/src/mir/util/BoundingBox.cc; Interpolation failed (-2); Mars server task finished in error; Double buffer error: Assertion failed: length == buffers_[i].length_ in run, line 282 of /home/cds/git/mars-server/eckit/src/eckit/io/DblBuffer.cc(RemoteException from Connector[mvr0010,marsmvr-0010:9701]) [mars]; Error code is -2; Request failed; Some errors reported (last error -2).
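
For reference, below is a minimal sketch of what I understand a valid request should look like, based on the CDS documentation: 'area' is a single North/West/South/East box in degrees (north value first, nothing beyond +/-90) and 'grid' is the lat/lon increment in degrees. The coordinates, date and variable here are placeholder values, not my real ones.

import cdsapi

c = cdsapi.Client()
c.retrieve(
    'reanalysis-era5-single-levels',
    {
        'product_type': 'reanalysis',
        'variable': ['2m_temperature'],
        'year': '2023',
        'month': '03',
        'day': '09',
        'time': ['10:00', '11:00', '12:00'],
        # area is [North, West, South, East] in degrees; North >= South and both within +/-90
        'area': [35.15, 33.90, 35.14, 33.91],
        # grid spacing in degrees of latitude/longitude (not kilometres)
        'grid': '0.25/0.25',
        'format': 'netcdf',
    },
    'download.nc')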



My Code:

#working for 92 logs

import math
import builtins
from os.path import exists

import cdsapi
import pandas as pd
import psutil
import xarray as xr

df1 = pd.read_csv('PX4_logData_skip_na_fromtimestamp_v3.csv', usecols=['logNum', 'Time', 'Date'])
df3 = pd.read_csv('timeconvdatav4.csv', usecols=['logNum'])
df1 = df1[:100]
path1 = 'D:/Abhishek/UBC/MASc/Thesis/Energy_Modeling/Data/'
path2 = 'D:/Abhishek/UBC/MASc/Thesis/Energy_Modeling/WeathERA5Single/'
logIds = df3['logNum'].tolist()
#batfile = '/log_battery_status_0.csv'
notFoundList = []
#date = df1['Date'].tolist()
#time = df1['Time'].tolist()
# note: `min` below shadows the built-in min(), which is why builtins.min()/builtins.max()
# are used later in create_grid()
month = int()
day = int()
year = int()
hr = int()
min = int()
i = 0
common_df = pd.DataFrame()
#ht = df2['terrain_alt']
#time = df1['Time']
#working till mean() function

for logId in logIds:
    # pick the specific time and date from PX4_logData_skip_na_fromtimestamp_v3.csv by matching
    # the log id against timeconvdatav4.csv
    time1 = df1[df1['logNum'] == logId]['Time']
    date1 = df1[df1['logNum'] == logId]['Date']
    date0 = date1[i]
    month, day, year = map(int,date0.split('/'))
    print(month, day, year)
    time0 = time1[i]
    hr, min, sec = map(int,time0.split(':'))
    i += 1
    #for logId in logIds:
    path = path1 + logId
    gpsfile = '/log_vehicle_global_position_0.csv'
    if not exists(path + gpsfile):
        notFoundList.append(logId)
        continue
    df2 = pd.read_csv(path + gpsfile)
    lat = df2['lat']
    long = df2['lon']
    alt = df2["terrain_alt"].mean()
    #print (hr, min, sec)
    # Create an empty list to store the area grid strings
    #area_grid = []

    # flight-track coordinates for this log:
    latitudes = df2['lat'].tolist()
    longitudes = df2['lon'].tolist()


    def distance(lat1, lon1, lat2, lon2):
        """
        Calculates the great-circle distance between two points on the earth's surface
        using the Haversine formula.
        """
        R = 6371.0  # radius of the earth in kilometers
        dlat = math.radians(lat2 - lat1)
        dlon = math.radians(lon2 - lon1)
        a = math.sin(dlat / 2) ** 2 + math.cos(math.radians(lat1)) * math.cos(math.radians(lat2)) * math.sin(
            dlon / 2) ** 2
        c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
        dist = R * c
        return dist
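
    # Quick sanity check with hypothetical values close to the failing area in the error above:
    # distance(35.14440, 33.90585, 35.14463, 33.90585) is only roughly 0.03 km, so for a short
    # flight track the grid_size computed in create_grid() below can end up extremely small.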
    def create_grid(latitudes, longitudes, min_lat=-90, max_lat=90, min_lon=-180, max_lon=180):
        """
        Creates a grid of cells covering a larger area specified by the input latitudes and longitudes,
        with each cell having a size determined by the distance between the minimum and maximum latitudes
        and longitudes in the input data.

        Args:
        latitudes: list of latitude values
        longitudes: list of longitude values
        min_lat: minimum latitude value for the grid, default -90
        max_lat: maximum latitude value for the grid, default 90
        min_lon: minimum longitude value for the grid, default -180
        max_lon: maximum longitude value for the grid, default 180

        Returns:
        grid_cells: list of tuples representing grid cells, where each tuple contains (min_lat, min_lon, max_lat, max_lon)
        grid_size: size of each grid cell in kilometers
        """
        # Limit latitudes and longitudes to fall within the specified range
        latitudes = [builtins.min(builtins.max(min_lat, lat), max_lat) for lat in latitudes]
        longitudes = [builtins.min(builtins.max(min_lon, lon), max_lon) for lon in longitudes]


        min_latitude, max_latitude = builtins.min(latitudes), builtins.max(latitudes)
        min_longitude, max_longitude = builtins.min(longitudes), builtins.max(longitudes)

        # Limit max_latitude and max_longitude to their respective maximum values
        max_latitude = builtins.min(max_latitude, 90)
        max_longitude = builtins.min(max_longitude, 180)
        #max_latitude = min(max_latitude, 90)
        #max_longitude = min(max_longitude, 180)

        # Limit min_longitude to its minimum value of -180
        if min_longitude < -180:
            min_longitude = -180

        # Limit max_longitude to its maximum value of 180
        if max_longitude > 180:
            max_longitude = 180

        lat_dist = distance(min_latitude, min_longitude, max_latitude, min_longitude)
        lon_dist = distance(min_latitude, min_longitude, min_latitude, max_longitude)
        # round to 10 decimal places; with coarser rounding the grid size was ending up as zero
        grid_size = round(builtins.min(lat_dist, lon_dist) / 5.0, 10)
        test = builtins.min(lat_dist, lon_dist)
        grid_cells = []
        lat = min_latitude
        # to debug an individual iteration, use e.g.:
        # if i == 94:
        #     print()

        while lat < max_latitude:
            lon = min_longitude
            while lon < max_longitude:
                cell_lat1, cell_lon1 = lat, lon
                cell_lat2, cell_lon2 = lat + grid_size, lon + grid_size
                grid_cells.append((cell_lat1, cell_lon1, cell_lat2, cell_lon2))
                lon += grid_size
            lat += grid_size
        return grid_cells, grid_size

    # Create a grid of cells covering the input data, with a cell size based on the span between the
    # minimum and maximum latitudes and longitudes of the track
    grid_cells, grid_size = create_grid(latitudes, longitudes)
    print(f"Created {len(grid_cells)} grid cells with a size of {grid_size} km")

    import json
    # Define the request variable
    request = {'area': []}
    # Create an area grid string for the current latitude and longitude
    area_grid = []
    #if len(grid_cells) > 0:
        #print(f"Created {len(grid_cells)} grid cells with sizes of {grid_size} km each")
        #rint(i)
    for cell in grid_cells:
        lat, lon, _, _ = cell
        area_grid.append(f'{lat}/{lon}/{lat + grid_size}/{lon + grid_size}')
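        # Note: the CDS docs list 'area' as North/West/South/East in degrees, while the string above
        # puts the smaller latitude first; also, area_grid keeps growing across this loop, so each
        # retrieve() below gets every cell accumulated so far, not just the current one. Both may be
        # worth checking against the bounding-box assertion in the error.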
        #break  # add this line to restrict to the first element
        # Add the area grid string to the request variable
        #request['area'].append(area_grid)
        # Convert the request variable to a JSON-formatted string

        #request_string = json.dumps(request)
        #print(request_string)
        # Print the resulting area grid list
        print(area_grid)
        #print(time)

        try:
            c = cdsapi.Client()

            # the below line fetches data T-1hr, T hr and T+1hr
            time = [str((hr - 1) % 24).zfill(2) + ":00", str(hr).zfill(2) + ":00",
                    str((hr + 1) % 24).zfill(2) + ":00"]
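            # note: the % 24 wrap keeps the hour strings valid, but when hr is 0 or 23 the T-1/T+1
            # slots still fall on the same 'day' value used in the request below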

            print(time)
            # if grid_size >< 4
            print(grid_size)

            data = c.retrieve(
                'reanalysis-era5-single-levels',
                {
                    'product_type': 'reanalysis',
                    # ERA5 - single variables
                    'variable': ['100m_u_component_of_wind', '10m_u_component_of_wind',
                                 '10m_v_component_of_neutral_wind',
                                 '10m_v_component_of_wind', '10m_wind_gust_since_previous_post_processing',
                                 '2m_dewpoint_temperature', '2m_temperature', 'evaporation', 'friction_velocity',
                                 'instantaneous_10m_wind_gust', 'k_index',
                                 'maximum_2m_temperature_since_previous_post_processing',
                                 'minimum_2m_temperature_since_previous_post_processing', 'skin_temperature',
                                 'surface_net_thermal_radiation', 'surface_pressure', 'surface_sensible_heat_flux',
                                 'surface_solar_radiation_downward_clear_sky', 'total_cloud_cover',
                                 'total_column_water',
                                 'total_column_water_vapour', 'total_precipitation', 'type_of_high_vegetation',
                                 'type_of_low_vegetation'],
                    # 'temperature_of_snow_layer', 'snowfall','snow_density', 'snow_evaporation',
                    'year': year,
                    'month': month,
                    'day': day,
                    'time': time,
                    'format': 'netcdf',
                    'area': area_grid,
                    'grid': '{}/{}'.format(grid_size, grid_size)
                    # 'grid': '{}/{}'.format(grid_size_values, grid_size_values)
                    # 'grid': '0.05/0.05', #changing this will change the number of samples downloaded
                    # step is not working
                    # 'step': '1/1440'

                },
                'download.nc')

            #print(data)

            # process the downloaded data here


            ds = xr.open_dataset('download.nc')
            # Check RAM usage
            mem = psutil.virtual_memory()
            #if mem.used > 14 * 1024 * 1024 * 1024:  # 14GB in bytes
                #print(mem.used)
                #print("Skipping iteration due to high RAM usage.")
                #continue
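            # note: the .sel() call below is not assigned back to ds, so it does not actually subset
            # the data before to_dataframe()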
            ds.sel(time=ds.time[0], latitude=ds.latitude[0], longitude=ds.longitude[0])
            print(ds.time)
            df = ds.to_dataframe()
            df["logid"] = logId
            # set the time column
            df['time'] = f"{hr:02d}:{min:02d}:{sec:02d}"
            df["date"] = date0
            df["lat"] = lat
            df["lon"] = lon
            # below line not working
            # df["area_grid"] = area_grid

            common_df = pd.concat([common_df, df], ignore_index=True)
            newdf2 = path2 + "GridAct_weatherdataTpnm1_era5Mar08_v24.csv"
            common_df.to_csv(newdf2, index=False)
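            # note: common_df is rewritten in full for every grid cell, which gets slower as it grows;
            # saving once after the loops finish might be worth considering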
            # this works roll back if above concat doesn't work
            # newdf = path2 + logId + "_weatherdata_era5feb07.csv"
            # Save the merged DataFrame to a new CSV file
            # df.to_csv(newdf, index=False)

            # print(df)
            # df.to_csv('weatherdata_era5land_feb06.csv')
        except KeyError:
            print("Error fetching data from CDS API. Skipping to the next iteration.")
            continue
        except Exception as e:
            print(f"Error fetching data for area{area_grid}: {e}")
            continue

        # use grid_size values in API request
        # ...
    #else:
        #print(f"Created {len(grid_cells)} grid cell with a size of {grid_size} km")