In [1]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import scipy
from scipy.optimize import minimize_scalar
import xarray as xr
from tqdm import tqdm

import function_GLOBCOASTS as FUNC

import hvplot.pandas
import hvplot.xarray

import holoviews as hv

hv.extension("bokeh")
In [2]:
SEADATA_PATH = "SEADATA_14140pts_1993_2019-analysed.nc"
BQART_PATH = "Sorties_Bqart.txt"
TIDE_PATH = "Tide_Glob.mat"
DEPTH_OF_CLOSURE = "Dc.nc"
VALIDATION_DATA = 'Shorelines_global_20231101_shift.mat'
In [3]:
# SOLID RIVER DISCHARGE CALCULATED WITH THE BQART FORMULA (Te = 0.2) [Mt/yr]
bqart_brut   = pd.read_csv(BQART_PATH, delimiter=';', header=None, names = ["inflow", "outflow"])
bqart_brut
bqart_m3     = bqart_brut["inflow"]/2.650 # divide by the sediment density (2.650 t/m3) to convert mass to volume [m3]
bqart        = bqart_m3/12            # m3/yr --> m3/month
bqart
Out[3]:
inflow outflow
0 1.505870e+07 12046960.00
1 2.707426e+05 216594.00
2 5.095856e+05 407668.50
3 3.068417e+04 24547.34
4 2.582488e+07 20659910.00
... ... ...
14135 3.858788e+03 3087.03
14136 0.000000e+00 0.00
14137 0.000000e+00 0.00
14138 0.000000e+00 0.00
14139 0.000000e+00 0.00

14140 rows × 2 columns

Out[3]:
0        473544.025157
1          8513.918239
2         16024.704403
3           964.911006
4        812103.144654
             ...      
14135       121.345535
14136         0.000000
14137         0.000000
14138         0.000000
14139         0.000000
Name: inflow, Length: 14140, dtype: float64
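
A quick sanity check of the unit conversion against the first row printed above (the inflow divided by a 2.650 t/m3 sediment density, then by 12 months):

inflow_row0 = 1.505870e7           # first "inflow" value shown above
m3_per_year = inflow_row0 / 2.650  # mass -> volume using the 2.650 t/m3 sediment density
m3_per_month = m3_per_year / 12
print(m3_per_month)                # ~473544, matching bqart.iloc[0] above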

Load the SEADATA NetCDF dataset

In [4]:
seadata = xr.open_dataset(SEADATA_PATH,engine='netcdf4')
seadata
Out[4]:
<xarray.Dataset> Size: 9GB
Dimensions:                                     (t: 9861, pos: 14140,
                                                 mounth: 324, mounths: 324)
Coordinates:
    mounth                                      (mounths) float64 3kB ...
Dimensions without coordinates: t, pos, mounths
Data variables: (12/37)
    sla                                         (t, pos) float64 1GB ...
    dac                                         (t, pos) float64 1GB ...
    lat                                         (pos) float64 113kB ...
    lon                                         (pos) float64 113kB ...
    dac_mounthly                                (mounth, pos) float64 37MB ...
    sla_mounthly                                (mounth, pos) float64 37MB ...
    ...                                          ...
    E_waves_normalised                          (mounth, pos) float64 37MB ...
    rivdis                                      (t, pos) float64 1GB ...
    rivdis_mounthly                             (mounth, pos) float64 37MB ...
    rivdis_detrend                              (mounth, pos) float64 37MB ...
    rivdis_detrend_mounth_season_mean_removed   (mounth, pos) float64 37MB ...
    rivdis_normalised                           (mounth, pos) float64 37MB ...
(interactive dataset view collapsed; it lists all 37 data variables on dims (t, pos) and (mounth, pos), plus lat/lon)

    Fix the time coordinate

    In [5]:
    time_vec = pd.date_range("1993", "2019-12-31")
    assert(len(time_vec) == len(seadata.t))
    # seadata.t is a daily axis from 1993-01-01 to 2019-12-31 (27*365 + 6 leap days = 9861 values)
    seadata = seadata.assign_coords(t = time_vec)
    seadata
    
    Out[5]:
    <xarray.Dataset> Size: 9GB
    Dimensions:                                     (t: 9861, pos: 14140,
                                                     mounth: 324, mounths: 324)
    Coordinates:
        mounth                                      (mounths) float64 3kB ...
      * t                                           (t) datetime64[ns] 79kB 1993-...
    Dimensions without coordinates: pos, mounths
    Data variables: (12/37)
        sla                                         (t, pos) float64 1GB ...
        dac                                         (t, pos) float64 1GB ...
        lat                                         (pos) float64 113kB ...
        lon                                         (pos) float64 113kB ...
        dac_mounthly                                (mounth, pos) float64 37MB ...
        sla_mounthly                                (mounth, pos) float64 37MB ...
        ...                                          ...
        E_waves_normalised                          (mounth, pos) float64 37MB ...
        rivdis                                      (t, pos) float64 1GB ...
        rivdis_mounthly                             (mounth, pos) float64 37MB ...
        rivdis_detrend                              (mounth, pos) float64 37MB ...
        rivdis_detrend_mounth_season_mean_removed   (mounth, pos) float64 37MB ...
        rivdis_normalised                           (mounth, pos) float64 37MB ...

    Restrict to the continental coastline points (first 8841 positions)

    In [6]:
    seadata = seadata.assign_coords({"lon": seadata.lon, "lat": seadata.lat})
    seadata.Hs.isel(t=-1).hvplot.scatter(x="lon", y="lat", c="Hs") + seadata.Hs.isel(t=-1).isel(pos=slice(0,8841)).hvplot.scatter(x="lon", y="lat", c="Hs")
    
    Out[6]:
    In [7]:
    seadata_continents = seadata.isel(pos=slice(0, 8841))
    bqart_continents = bqart[:8841].values
    bqart_continents
    seadata_continents
    
    Out[7]:
    array([473544.02515723,   8513.91823899,  16024.70440252, ...,
             5334.67295597,   7015.93710692,   7419.99685535], shape=(8841,))
    Out[7]:
    <xarray.Dataset> Size: 6GB
    Dimensions:                                     (t: 9861, pos: 8841,
                                                     mounth: 324, mounths: 324)
    Coordinates:
        lat                                         (pos) float64 71kB ...
        lon                                         (pos) float64 71kB ...
        mounth                                      (mounths) float64 3kB ...
      * t                                           (t) datetime64[ns] 79kB 1993-...
    Dimensions without coordinates: pos, mounths
    Data variables: (12/35)
        sla                                         (t, pos) float64 697MB ...
        dac                                         (t, pos) float64 697MB ...
        dac_mounthly                                (mounth, pos) float64 23MB ...
        sla_mounthly                                (mounth, pos) float64 23MB ...
        dac_detrend                                 (mounth, pos) float64 23MB ...
        sla_detrend                                 (mounth, pos) float64 23MB ...
        ...                                          ...
        E_waves_normalised                          (mounth, pos) float64 23MB ...
        rivdis                                      (t, pos) float64 697MB ...
        rivdis_mounthly                             (mounth, pos) float64 23MB ...
        rivdis_detrend                              (mounth, pos) float64 23MB ...
        rivdis_detrend_mounth_season_mean_removed   (mounth, pos) float64 23MB ...
        rivdis_normalised                           (mounth, pos) float64 23MB ...
    In [64]:
    # ---------------------------------------------------------------------------------------
    # TIDE INPUT TO COMPUTE THE SLOPE THROUGH THE MODIFIED SUNAMURA (1984) FORMULA (Arias et al., 2025)
    TIDE        = scipy.io.loadmat(TIDE_PATH)
    Tide_range  = TIDE['Tide_max'][0][:8841] #m
    Tide_range.shape
    
    Out[64]:
    (8841,)
    In [68]:
    # ---------------------------------------------------------------------------------------
    # DYNAMICAL DEPTH OF CLOSURE (Young, 1995)
    Dc        = xr.open_dataset(DEPTH_OF_CLOSURE,engine='netcdf4')
    DoC       = Dc['Dc'].values #m
    DoC.shape
    # DoC has shape (240, 8841): the depth of closure does not match the 324-month
    # time vector in SEADATA, so it is presumably monthly but its start/end dates are unknown.
    
    # Since all the other datasets have 324 months, DoC is padded artificially
    # by repeating its last available month.
    
    pad_length = 324 - DoC.shape[0]
    DoC_padded = np.vstack([DoC, np.tile(DoC[-1:], (pad_length, 1))])
    DoC_padded.shape
    
    Out[68]:
    (240, 8841)
    Out[68]:
    (324, 8841)
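    As a cross-check, the same padding (repeating the last available month up to 324) can also be written with np.pad in "edge" mode; this is just an equivalent sketch of the construction above:

    pad_length = 324 - DoC.shape[0]
    DoC_padded_alt = np.pad(DoC, ((0, pad_length), (0, 0)), mode="edge")   # repeat the last row pad_length times
    assert DoC_padded_alt.shape == (324, 8841)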
    In [10]:
    # ---------------------------------------------------------------------------------------
    # WORLDWIDE CALIBRATION COEFFICIENT
    # ---------------------------------------------------------------------------------------
    # VALIDATION FILE: WATERLINE POSITIONS OBTAINED FROM LANDSAT 7 AND 8 PRODUCTS
    Validation  = scipy.io.loadmat(VALIDATION_DATA)
    Xshores_val = Validation['X_safe'][:,108:-12]
    latX        = Validation['latX'][:]
    lonX        = Validation['lonX'][:]
    Xshores_VALIDATION = np.transpose((-1*Xshores_val))
    Xshores_VALIDATION.shape, latX.shape, lonX.shape
    # the validation data match the 324-month time vector in SEADATA, so no padding is needed
    
    Out[10]:
    ((324, 8841), (8841, 1), (8841, 1))
    In [11]:
    # CONSTANT VALUES DECLARATION
    D50 = 10e-3         # Median grain size [m]
    PORO = 0.4          # Sand porosity
    ROHS = 2650         # Sand density [kg/m3]
    ROH = 1000          # Water density [kg/m3]
    G = 9.81            # gravitational acceleration [m/s2]
    R = 6371000         # Earth radius [m]
    NMONTHS = len(seadata.mounth)   # Number of monthly time steps
    NMONTHS
    
    Out[11]:
    324
    In [12]:
    # INPUT CALCULATION 
    # ---------------------------------------------------------------------------------------
    # TIDE: FORWARD-FILL NaN VALUES WITH THE LAST VALID VALUE
    for i in range(1, len(Tide_range)):
        if np.isnan(Tide_range[i]):
            Tide_range[i] = Tide_range[i-1]
                
    print("# - Tide range is cleaned")
    np.isnan(Tide_range).sum()
    
    # - Tide range is cleaned
    
    Out[12]:
    np.int64(0)
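    The same forward-fill can be done in a vectorised way with pandas; this is only an equivalent sketch (it assumes, like the loop above, that the first value is not NaN):

    Tide_range_ffill = pd.Series(Tide_range).ffill().to_numpy()
    assert np.isnan(Tide_range_ffill).sum() == 0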
    In [13]:
    # ---------------------------------------------------------------------------------------
    # SOLID RIVER DISCHARGE VARIABILITY THROUGH THE ISBA-CTRIP DATASET 
    # 1. CLEAN LIQUID RIVER DISCHARGE DATASET (ISBA-CTRIP)
    seadata_continents['rivdis_mounthly'] = seadata_continents['rivdis_mounthly'].fillna(0)
    # 2. SOLID RIVER DISCHARGE VARIABILITY CALCULATION
    QrivD = FUNC.RIVDIS(seadata_continents['rivdis_mounthly'].T,bqart_continents[0])
    np.isnan(QrivD).sum()
    QrivD.shape
    print("# - QrivD is ok")     
    
    Out[13]:
    np.int64(0)
    Out[13]:
    (8841, 324)
    # - QrivD is ok
    
    In [14]:
    # ---------------------------------------------------------------------------------------
    # TOTAL WATER LEVEL CALCULATION
    # 1. Foreshore beach slope through Sunamura (1984) modified by tide (Arias et al., 2025)
    
    def wl(Tp):
        return (G/(2*np.pi))*Tp**2   # deep-water wavelength [m]
    def beta_fun(Tp, Hs):
        return 0.12 * ((np.sqrt(2*np.pi*D50*wl(Tp)))/(Hs+Tide_range))**0.5  # foreshore slope [-]
    
    beta = beta_fun(seadata_continents["Tp_mounthly"], seadata_continents["Hs_mounthly"])
    beta.isnull().sum()
    print("# - Foreshore slopes is ok ")
    
    Out[14]:
    <xarray.DataArray ()> Size: 8B
    array(0)
        # - Foreshore slopes is ok 
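    For orientation, the slope formula can be evaluated for representative values (Hs = 1 m, Tp = 8 s, tidal range = 1 m, and D50 = 0.01 m as declared above); these numbers are illustrative only, not taken from the dataset:

    Tp_ex, Hs_ex, tide_ex = 8.0, 1.0, 1.0              # illustrative values
    L0 = (G / (2 * np.pi)) * Tp_ex**2                  # deep-water wavelength, ~100 m
    beta_ex = 0.12 * (np.sqrt(2 * np.pi * D50 * L0) / (Hs_ex + tide_ex))**0.5
    print(beta_ex)                                     # ~0.13, i.e. a foreshore slope of about 1/7.5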
        
        In [15]:
        # 2. Wave set-up (Stockdon et al., 2006) -> approximation for a linear slope
        def setup(Hs, Tp):
            return 0.35*beta*np.sqrt(Hs*wl(Tp)) #[m]
        
        setup_ = setup(seadata_continents["Tp_mounthly"], seadata_continents["Hs_mounthly"])
        setup_.isnull().sum()
        
        Out[15]:
        <xarray.DataArray ()> Size: 8B
        array(0)
            In [16]:
            #3. Sum of sea level anomaly (SLA), storm surge (DAC) and wave set-up (SU)
            twl  = seadata_continents["sla_mounthly"] + seadata_continents["dac_mounthly"] + setup_
            twl.isnull().sum()
            print("# - Total water level is ok")
            
            Out[16]:
            <xarray.DataArray ()> Size: 8B
            array(0)
                # - Total water level is ok
                
                In [17]:
                # LAT/LON PRE-TREATMENT TO GET RID OF SATELLITE "JUMPS"
                # ---------------------------------------------------------------------------------------
                #1. Find the indices of positions spaced by less than 0.55° to define contiguous zones (0.55° is the maximum resolution of our dataset)
                lon, lat = seadata_continents.lon.values, seadata_continents.lat.values
                start_index, end_index = FUNC.find_index(lon, lat,0.55)
                
                #2. Create zones by merging adjacent lat/lon sections
                join_section,join_index = FUNC.join_sections(start_index, end_index, lon, lat,0.55)
                
                #3. Keep only zones containing more than 2 positions
                join_section_filtered = [(i, j) for i, j in join_index if np.abs(i - j) > 2]
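
                FUNC.find_index and FUNC.join_sections are defined in function_GLOBCOASTS and are not shown here. As a purely illustrative sketch of the idea described in the comments (cut the coastline wherever two consecutive points are more than 0.55° apart), one could write something like:

                def split_sections(lon, lat, max_gap=0.55):
                    """Hypothetical illustration: return (start, end) index pairs of contiguous coastal zones."""
                    gap = np.hypot(np.diff(lon), np.diff(lat))   # spacing between consecutive points [deg]
                    breaks = np.where(gap > max_gap)[0] + 1      # indices where a new zone starts
                    starts = np.r_[0, breaks]
                    ends = np.r_[breaks, len(lon)]
                    return list(zip(starts, ends))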
                
                In [18]:
                alpha = np.arctan2(np.diff(lat),np.diff(lon))
                alpha = np.append(alpha, alpha[-1])
                normal = alpha+np.pi/2
                
                In [19]:
                # calculate incident angle
                def get_incident_angle(wave_dir, normal): 
                    return np.mod(wave_dir - normal, 2*np.pi)
                
                incident_angle = get_incident_angle(np.radians(seadata_continents["dir_mounthly"]), normal)
                
                df = pd.DataFrame({
                    "lon":lon,
                    "lat":lat,
                    "normal":normal,
                    "wave_angle":np.radians(seadata_continents["dir_mounthly"][0, :]),
                    "incident_angle":incident_angle[1],
                    "magnitude":np.ones(len(lon)),
                    
                })
                df['index'] = df.index
                hv.Scatter(df, kdims=['lon', 'lat']).opts(width=1000, height=500, tools = ['hover'],size=20,color="index", colorbar=True, colorbar_opts={"title":"node number"})*\
                hv.VectorField(df, kdims=['lon', 'lat'], vdims=['normal','magnitude']).opts(width=1000, height=500,  scale = 0.01)
                
                hv.Scatter(df, kdims=['lon', 'lat']).opts(width=1000, height=500, tools = ['hover'],size=20,color="incident_angle", colorbar=True, cmap="colorwheel", colorbar_opts={"title":"incident angle"})*\
                hv.VectorField(df, kdims=['lon', 'lat'], vdims=['normal','magnitude']).opts(width=1000, height=500,  scale = 0.01)*\
                hv.VectorField(df, kdims=['lon', 'lat'], vdims=['wave_angle','magnitude']).opts(width=1000, height=500,  scale = 0.01, line_dash="dashed", title = "incident angle")
                
                WARNING:param.Scatter: Chart elements should only be supplied a single kdim
                WARNING:param.Scatter: Chart elements should only be supplied a single kdim
                
                Out[19]:

                We can see the global trend: offshore-directed waves on the east coast and onshore-directed waves on the west coast.
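
                A small check of the angle convention used by get_incident_angle (the function from the cell above): angles are wrapped to [0, 2π), so a wave from 90° hitting a shore normal at 45° gives an incidence of 45°, while a wave from 10° gives 325°:

                print(np.degrees(get_incident_angle(np.radians(90.0), np.radians(45.0))))   # 45.0
                print(np.degrees(get_incident_angle(np.radians(10.0), np.radians(45.0))))   # 325.0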

                In [56]:
                import time
                def twl(seadata_continents, t, idx, incidence_angle, setup_):
                    twl = seadata_continents["sla_mounthly"][t,idx] + seadata_continents["dac_mounthly"][t,idx] + (np.cos(incidence_angle[t,idx])*setup_[t,idx]) 
                    return twl
                
                def kamp_mass(Tp, Hs, incident_angle, roh = ROH, rohs = ROHS, d50 = D50):
                    # Kamphuis (1991) longshore transport rate (mass form), with the sign taken from the incidence angle
                    kamp_mass_i = 2.33 * (rohs / (rohs - roh)) * (Tp ** 1.5) * (np.tan(beta) ** 0.75) * (d50 ** -0.25) * (Hs ** 2) * np.abs(np.sin(2 * incident_angle)) ** 0.6 * np.sign(incident_angle)
                    return kamp_mass_i
                
                def kamp_in_m3_per_month(kamp_mass, roh = ROH, rohs = ROHS, poro = PORO):
                    # convert the transport from a mass rate to a volume per (30-day) month, accounting for porosity
                    return 86400 * 30 * (kamp_mass / (rohs - roh)) / (1.0 - poro)
                
                def compute_morpho_tot(DoC, QrivD, Ls, beta, t, idx, j, dkamp, dt):
                    a = -1 / DoC[t,idx]
                    b = (dkamp[t, idx] + QrivD[t, idx]) / Ls[j]
                    c = 1 / (np.tan(beta[t, idx]))
                    d = 1 / (np.tan(beta[t - 1, idx]))
                    return (a * b + (c - d)) * dt
                
                def compute_morpho_lst(DoC, QrivD, Ls, t, idx, j, dkamp, dt):
                    return ((-1 / DoC[t,idx]) * ((dkamp[t, idx] + QrivD[t, idx]) / Ls[j])) * dt
                
                def compute_morpho_xshore(beta, t, idx, dt):
                    return ((1 / (np.tan(beta[t,idx]))) - (1 / (np.tan(beta[t - 1, idx])))) * dt
                
                # MAIN LOOP
                # ---------------------------------------------------------------------------------------
                dt = 1                            # time step 1 month
                
                
                kamp_mass_i = kamp_mass(seadata_continents["Tp_mounthly"],seadata_continents["Hs_mounthly"], incident_angle)
                kamp_mass_ip1 = kamp_mass(np.roll(seadata_continents["Tp_mounthly"],1,axis=1),np.roll(seadata_continents["Hs_mounthly"],1,axis=1), incident_angle)  # same formula with Tp/Hs shifted by one point along the coast
                kamp_i = kamp_in_m3_per_month(kamp_mass_i)
                kamp_ip1 = kamp_in_m3_per_month(kamp_mass_ip1)
                dkamp = kamp_ip1 - kamp_i  # difference between neighbouring points, used as the alongshore transport gradient [m3/month]
                
                
                df2 = pd.DataFrame({
                    "lon":lon,
                    "lat":lat,
                    "normal":normal,
                    "kamp":kamp_i.isel(mounth=-1).data,
                    "kamp_p1":kamp_ip1.isel(mounth=-1).data,
                    "dkamp":dkamp.isel(mounth=-1).data
                    
                })
                
                df2.hvplot.scatter(x="lon", y="lat", c="dkamp").opts(clim=(-1e5, 1e5))
                
                Out[56]:
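
                To make the update rule concrete, here is a purely scalar illustration of one monthly step for a single point, combining the hydrodynamic term used in the main loop with compute_morpho_tot defined above (all numbers are arbitrary placeholders, not dataset values):

                Dc_ex, Ls_ex   = 8.0, 5000.0      # depth of closure [m], distance to the next point [m]
                dkamp_ex       = 2.0e4            # alongshore gradient of longshore transport [m3/month]
                Qriv_ex        = 1.0e3            # solid river discharge [m3/month]
                beta_t, beta_tm1 = 0.10, 0.11     # foreshore slope at t and t-1
                dtwl_ex        = 0.05             # change in total water level [m]
                
                dx_hydro  = -dtwl_ex / np.tan(beta_t)                                  # waterline shift driven by the water-level change
                dx_morpho = (-1 / Dc_ex) * ((dkamp_ex + Qriv_ex) / Ls_ex) \
                            + (1 / np.tan(beta_t) - 1 / np.tan(beta_tm1))              # longshore + cross-shore slope terms (dt = 1 month)
                print(dx_hydro + dx_morpho)                                            # total monthly shoreline change [m]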
                In [ ]:
                # 1. FIRST SPATIAL LOOP, WHICH SELECTS EACH JOINED SECTION INDEPENDENTLY
                for i, section in tqdm(enumerate(join_section_filtered)):
                    start_index                 = section[0]
                    end_index                   = section[1]
                
                    index                       = np.arange(start_index, end_index)
                    zone_length                 = len(index)
                
                    # a. INITIALISATION OF RESULTS TABLE FOR THE SECTION
                    dKAMP                       = np.zeros((NMONTHS, zone_length))
                    dTWL                        = np.zeros((NMONTHS, zone_length))
                    twl_                         = np.zeros((NMONTHS, zone_length))
                
                    dx_CS_Hydro                 = np.zeros((NMONTHS, zone_length))
                    dx_CS_MorphoLST             = np.zeros((NMONTHS, zone_length))
                    dx_CS_MorphoTOT             = np.zeros((NMONTHS, zone_length))
                    dx_CS_MorphoXshore          = np.zeros((NMONTHS, zone_length))
                    dx_CS_TOTAL                 = np.zeros((NMONTHS, zone_length))
                
                    Lon                         = np.zeros((NMONTHS, zone_length))
                    Lat                         = np.zeros((NMONTHS, zone_length))
                    DLon                        = np.zeros((NMONTHS, zone_length))
                    DLat                        = np.zeros((NMONTHS, zone_length))
                
                    X_CS_MorphoLST              = np.zeros((NMONTHS, zone_length))
                    X_CS_MorphoTOT              = np.zeros((NMONTHS, zone_length))
                    X_CS_Hydro                  = np.zeros((NMONTHS, zone_length))
                    X_CS_MorphoXshore           = np.zeros((NMONTHS, zone_length))
                    X_CS_TOTAL                  = np.zeros((NMONTHS, zone_length))
                
                    # b. PRE-PROCESSING OF LAT/LON DATA - SINCE WE CONVERT THEM TO METERS, THE MID-POINT IS TAKEN AS REFERENCE
                    lat_mid = np.mean(lat[index])
                    lon_mid = np.mean(lon[index])
                    Lon_m, Lat_m = FUNC.lonlat2xy(lon[index], lat[index], lon_mid, lat_mid)
                
                    # DISTANCE BETWEEN TWO CONSECUTIVE POINTS, IN METERS
                    dLon = np.diff(Lon_m)
                    dLat = np.diff(Lat_m)
                    dLon = np.append(dLon, 0)
                    dLat = np.append(dLat, 0)
                    Ls = np.sqrt(dLon ** 2 + dLat ** 2)
                    smooth_Ls = 1.5 * Ls  # m
                
                    # d. TIME LOOP
                    print(NMONTHS-1)
                    for t in range(1, NMONTHS-1):
                        #d.1. FOR EACH COASTAL POINT WE COMPUTE THE CHANGE IN COASTLINE POSITION
                        for j in range(zone_length - 1):
                            idx                  = index[j]                            # map the section point index to the global dataset index
                            # print(idx)
                            # TOTAL WATER LEVEL COMPUTATION TAKING INTO ACCOUNT THE LOCAL INCIDENCE ANGLE
                            # TWL[t,j]             = seadata_continents["sla_mounthly"][t,idx] + seadata_continents["dac_mounthly"][t,idx] + (np.cos(incidence_angle[t,idx])*setup_[t,idx]) 
                            twl_[t,j] = twl(seadata_continents, t, idx, incident_angle, setup_)
                            dTWL[t, j]           = twl_[t,j] - twl_[t-1,j]
                            dx_CS_Hydro[t, j]    = - (twl_[t,j] - twl_[t-1,j]) / (np.tan(beta[t, idx]))  # m
                
                            # MORPHOLOGICAL COMPONENT - LONGSHORE TRANSPORT RATE (KAMPHUIS, 1991)
                            incidence_angle_ip1  = incident_angle[t, idx + 1]
                            incidence_angle_i    = incident_angle[t, idx]  # radians
                
                            dx_CS_MorphoTOT[t, j] = compute_morpho_tot(DoC_padded,QrivD, Ls, beta, t, idx,j, dkamp, dt).values
                            dx_CS_MorphoLST[t, j] = compute_morpho_lst(DoC_padded, QrivD, Ls, t, idx, j, dkamp, dt)
                            dx_CS_MorphoXshore[t, j] = compute_morpho_xshore(beta, t, idx, dt)
                            # TOTAL DELTA
                            dx_CS_TOTAL[t,j]      = dx_CS_Hydro[t,j] + dx_CS_MorphoTOT[t,j]
                
                        # ONCE ALL POSITION CHANGES ARE COMPUTED, ADD THEM AT EACH TIME STEP TO THE PREVIOUS CROSS-SHORE POSITION (m)
                        # COMPONENTS
                        X_CS_Hydro[t, :]          = X_CS_Hydro[t - 1, :] + dx_CS_Hydro[t, :]
                        X_CS_MorphoTOT[t, :]      = X_CS_MorphoTOT[t - 1, :] + dx_CS_MorphoTOT[t, :]
                        X_CS_MorphoLST[t, :]      = X_CS_MorphoLST[t - 1, :] + dx_CS_MorphoLST[t, :]
                        X_CS_MorphoXshore[t, :]   = X_CS_MorphoXshore[t - 1, :] + dx_CS_MorphoXshore[t, :]
                
                        # TOTAL
                        X_CS_TOTAL[t,:]           = X_CS_TOTAL[t-1,:] + dx_CS_TOTAL[t,:]
                        
                        # PROJECTION FROM CARTESIAN TO GEOGRAPHICAL (meters)
                        DLon[t,:]                 = - dx_CS_TOTAL[t,:]  * np.sin(normal[index])
                        DLat[t,:]                 =   dx_CS_TOTAL[t,:]  * np.cos(normal[index])
                
                        time0 = time.time()
                        DLon[t,:]                 = FUNC.Wfilter(DLon[t,:],Lon_m,Lat_m,smooth_Ls)
                        DLat[t,:]                 = FUNC.Wfilter(DLat[t,:],Lon_m,Lat_m,smooth_Ls)
                        # print(np.shape(DLon))
                        # print(np.shape(Lon_m))
                        # print(np.shape(smooth_Ls))
                        # UPDATING THE COORDINATES 
                        Lon_m                     = Lon_m + DLon[t]
                        Lat_m                     = Lat_m + DLat[t]
                        
                        # CONVERT BACK TO DEGREES
                        Lon[t,:],Lat[t,:]         = FUNC.xy2lonlat(Lon_m, Lat_m, lon_mid, lat_mid)
                        # print(time.time()- time0)
                
                    break
                    # #e. FOR EACH SECTION WE STORE THE RESULTS
                    # r_index.append(index)
                    # r_dKAMP.append(dKAMP)
                    # r_dTWL.append(dTWL)
                    # r_TWL.append(TWL)
                
                    # r_Ls.append(Ls)
                    # r_alpha.append(alpha)
                    # r_normal.append(normal)
                    # r_incidence_angle.append(incidence_angle)
                    
                    # r_dx_CS_Hydro.append(dx_CS_Hydro)
                    # r_dx_CS_MorphoLST.append(dx_CS_MorphoLST)
                    # r_dx_CS_MorphoTOT.append(dx_CS_MorphoTOT)
                    # r_dx_CS_MorphoXshore.append(dx_CS_MorphoXshore)
                    # r_dx_CS_TOTAL.append(dx_CS_TOTAL)
                    
                    # r_X_CS_MorphoLST.append(X_CS_MorphoLST)
                    # r_X_CS_MorphoTOT.append(X_CS_MorphoTOT)
                    # r_X_CS_Hydro.append(X_CS_Hydro)
                    # r_X_CS_MorphoXshore.append(X_CS_MorphoXshore)
                    # r_X_CS_TOTAL.append(X_CS_TOTAL)
                
                    # r_Lon.append(Lon)
                    # r_Lat.append(Lat)
                    # r_Lon_m.append(Lon_m)
                    # r_Lat_m.append(Lat_m)
                    # r_DLon.append(DLon)
                    # r_DLat.append(DLat)
                    # r_dLon.append(dLon)
                    # r_dLat.append(dLat)
                
                lon[index]
                
                0it [00:00, ?it/s]
                323
                
                0it [00:15, ?it/s]
                
                Out[ ]:
                array([164.124056, 163.785778, 163.414111, 163.120806, 163.213278,
                       163.210028, 163.047556, 162.857583, 162.560889, 162.221639,
                       162.140806, 161.992444, 161.987417, 162.439139, 162.580056])
                In [71]:
                fig, ax = plt.subplots()
                # 1. FIRST SPATIAL LOOP WHICH SELECT EACH JOIN SECTION INDEPENDENTLY
                for i, section in tqdm(enumerate(join_section_filtered)):
                    start_index                 = section[0]
                    end_index                   = section[1]
                    index                       = np.arange(start_index, end_index)
                    im = ax.scatter(lon[index], lat[index])
                plt.show()
                
                222it [00:00, 1114.31it/s]
                
                [scatter map of the selected coastal sections]
                In [ ]:
                # In[12]:
                
                
                # CONCATENATION OF THE RESULTS
                # ---------------------------------------------------------------------------------------
                results_dKAMP              = np.concatenate(r_dKAMP, axis=1)
                results_dTWL               = np.concatenate(r_dTWL, axis=1)
                results_TWL                = np.concatenate(r_TWL, axis=1)
                
                results_Ls                 = np.concatenate(r_Ls, axis=1)
                results_alpha              = np.concatenate(r_alpha, axis=1)
                
                results_normal             = np.concatenate(r_normal, axis=1)
                results_incidence_angle    = np.concatenate(r_incidence_angle, axis=1)
                
                results_dx_CS_Hydro        = np.concatenate(r_dx_CS_Hydro, axis=1)
                results_dx_CS_MorphoLST    = np.concatenate(r_dx_CS_MorphoLST, axis=1)
                results_dx_CS_MorphoTOT    = np.concatenate(r_dx_CS_MorphoTOT, axis=1)
                results_dx_CS_MorphoXshore = np.concatenate(r_dx_CS_MorphoXshore, axis=1)
                results_dx_CS_TOTAL        = np.concatenate(r_dx_CS_TOTAL, axis=1)
                
                results_X_CS_MorphoLST     = np.concatenate(r_X_CS_MorphoLST, axis=1)
                results_X_CS_MorphoTOT     = np.concatenate(r_X_CS_MorphoTOT, axis=1)
                results_X_CS_Hydro         = np.concatenate(r_X_CS_Hydro, axis=1)
                results_X_CS_MorphoXshore  = np.concatenate(r_X_CS_MorphoXshore, axis=1)
                results_X_CS_TOTAL         = np.concatenate(r_X_CS_TOTAL, axis=1)
                  
                results_Lon                = np.concatenate(r_Lon, axis=1)
                results_Lat                = np.concatenate(r_Lat, axis=1)
                results_Lon_m              = np.concatenate(r_Lon_m, axis=1)
                results_Lat_m              = np.concatenate(r_Lat_m, axis=1)
                results_DLon               = np.concatenate(r_DLon, axis=1)
                results_DLat               = np.concatenate(r_DLat, axis=1)
                results_dLon               = np.concatenate(r_dLon, axis=1)
                results_dLat               = np.concatenate(r_dLat, axis=1)
                results_index              = np.concatenate(r_index)
                
                # Calibration of our seasonal cycles 
                date_list= pd.date_range('2000-1-1','2019-12-31', freq='ME').strftime("%Y-%m-%d")
                date = pd.DatetimeIndex(date_list)
                num_dates = len(date_list)
                lon_len = Xshores_val.shape[1]
                
                print(results_X_CS_MorphoTOT.shape)
                print(results_Lat.shape)
                print(results_Lon.shape)
                print(results_dx_CS_Hydro.shape)
                print(results_dx_CS_MorphoLST.shape)
                print(results_dx_CS_MorphoTOT.shape)
                print(results_dx_CS_MorphoXshore.shape)
                print(results_dx_CS_TOTAL.shape)
                
                ds = xr.Dataset(data_vars=
                    {
                        "results_dx_CS_Hydro": (("time", "node"), results_dx_CS_Hydro),
                        "results_dx_CS_MorphoLST": (("time", "node"), results_dx_CS_MorphoLST),
                        "results_dx_CS_MorphoTOT": (("time", "node"), results_dx_CS_MorphoTOT),
                        "results_dx_CS_MorphoXshore": (("time", "node"), results_dx_CS_MorphoXshore),
                        "results_dx_CS_TOTAL": (("time", "node"), results_dx_CS_TOTAL),
                        "x": (("time", "node"), results_Lon_m),
                        "y": (("time", "node"), results_Lat_m)
                    }, 
                    coords={
                        "lon": results_Lon[0,:], 
                        "lat": results_Lat[0,:]
                    }, 
                )
                
                print(ds)
                fig, ax = plt.subplots()
                im = ax.scatter(ds.lon, ds.lat, c = ds.results_dx_CS_TOTAL.mean(dim="time"), vmax=1, vmin=-1)
                plt.colorbar(im)
                plt.title("results_dx_CS_TOTAL")
                plt.show()
                # stop here, the script does not work afterwards
                
                VALIDATION_POSITION = {f"Position_{i}": Xshores_val[:, i] for i in range(lon_len)}
                VALIDATION = pd.DataFrame(VALIDATION_POSITION)
                print(VALIDATION_POSITION)
                print(VALIDATION)
                print(X_CS_TOTAL.shape)
                MODELE_POSITION = {f"Position_{i}": X_CS_TOTAL[i,:] for i in range(lon_len)}
                MODELE = pd.DataFrame(MODELE_POSITION)
                
                print(MODELE)
                print(MODELE_POSITION)
                
                def compute_seasonal_cycle_np(X, dates):
                    """
                    Compute the mean seasonal cycle of a time series.
                
                    Parameters:
                    - X (array): time series.
                    - dates (pd.DatetimeIndex): time index containing the dates.
                
                    Returns:
                    - seasonal_cycle (array): mean seasonal cycle (length = 12 months).
                    """
                    months = dates.month  # extract the months
                    seasonal_cycle = np.array([np.mean(X[months == m]) for m in range(1, 13)])  # mean per month
                    return seasonal_cycle
                
                #  Function to adjust MODELE before computing the seasonal cycle
                def transform_MODELE(c, X_MODELE):
                    """
                    Apply a linear transformation to X_MODELE.
                
                    Parameters:
                    - c (float): multiplicative coefficient.
                    - X_MODELE (array): original time series.
                
                    Returns:
                    - X_transformed (array): adjusted series.
                    """
                    return X_MODELE * c  # multiply by c for the adjustment
                
                #  Function to compute the RMSE after transformation and seasonal-cycle extraction
                def compute_rmse_c(c, X_MODELE, X_VALIDATION, dates):
                    """
                    Transform X_MODELE, compute its seasonal cycle and compare it with X_VALIDATION.
                
                    Parameters:
                    - c (float): multiplicative coefficient.
                    - X_MODELE (array): original time series.
                    - X_VALIDATION (array): validation seasonal cycle (length 12).
                    - dates (pd.DatetimeIndex): time index containing the dates.
                
                    Returns:
                    - RMSE (float): root mean square error.
                    """
                    X_transformed = transform_MODELE(c, X_MODELE)  # apply the transformation
                    seasonal_cycle_MODELE = compute_seasonal_cycle_np(X_transformed, dates)  # seasonal cycle of the adjusted model
                    seasonal_cycle_VALIDATION = compute_seasonal_cycle_np(X_VALIDATION, dates)  # seasonal cycle of the validation data
                    
                    return np.sqrt(np.mean((seasonal_cycle_MODELE - seasonal_cycle_VALIDATION) ** 2))  # RMSE
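                # As a quick, purely illustrative check of compute_seasonal_cycle_np (synthetic data,
                # not part of the calibration): an annual sinusoid sampled monthly should be returned unchanged.
                _dates_check = pd.date_range("2000-01-31", "2004-12-31", freq="ME")
                _x_check = np.sin(2 * np.pi * (np.asarray(_dates_check.month) - 1) / 12)
                # print(compute_seasonal_cycle_np(_x_check, _dates_check))   # 12 values tracing one sine period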
                # List of positions to analyse
                positions_to_plot = [f"Position_{i}" for i in range(Xshores_val.shape[1])]
                
                # Store the optimal results
                optimal_c_values = {}
                
                #  Loop over each position to optimise c
                for pos in positions_to_plot:
                    X_MODELE = MODELE[pos].values  # extract the MODELE series (array)
                    X_VALIDATION = VALIDATION[pos].values  # extract the VALIDATION series (array)
                    dates = MODELE.index  # retrieve the dates
                
                    # Find the c that minimises the RMSE
                    result = minimize_scalar(
                        compute_rmse_c, 
                        bounds=(-50, 50),  # search range
                        args=(X_MODELE, X_VALIDATION, dates), 
                        method="bounded"
                    )
                
                    # Extract the optimal c
                    c_optimal = result.x
                    rmse_min = result.fun
                    optimal_c_values[pos] = c_optimal  # store the best c
                
                    print(f" {pos} - Best c: {c_optimal:.6f}, minimum RMSE: {rmse_min:.10f}")
                
                    #  Visualise how the RMSE evolves with c
                    c_values = np.linspace(c_optimal - 2, c_optimal + 2, 500)  # generate a range of c values
                    rmse_values = [compute_rmse_c(c, X_MODELE, X_VALIDATION, dates) for c in c_values]  # compute the RMSE
                
                    plt.figure(figsize=(10, 6))
                    plt.plot(c_values, rmse_values, label="RMSE vs c", linewidth=2)
                    plt.axvline(c_optimal, color='red', linestyle='--', label=f"Optimal c = {c_optimal:.6f}")
                    plt.xlabel("Coefficient c")
                    plt.ylabel("RMSE")
                    plt.title(f"Évolution de la RMSE pour {pos}")
                    plt.legend()
                    plt.grid()
                    plt.show()
                
                    #  Compare the seasonal cycles before and after adjustment
                    X_transformed_optimal = transform_MODELE(c_optimal, X_MODELE)
                    seasonal_cycle_optimal = compute_seasonal_cycle_np(X_transformed_optimal, dates)
                    seasonal_cycle_original = compute_seasonal_cycle_np(X_MODELE, dates)
                    seasonal_cycle_validation = compute_seasonal_cycle_np(X_VALIDATION, dates)
                
                    plt.figure(figsize=(10, 6))
                    plt.plot(seasonal_cycle_validation, label="Validation cycle (Xval)", color="blue", linewidth=2)
                    plt.plot(seasonal_cycle_original, label="Original MODELE cycle", color="gray", linestyle="--", alpha=0.7)
                    plt.plot(seasonal_cycle_optimal, label=f"Adjusted cycle (c={c_optimal:.6f})", color="red", linewidth=2)
                
                    plt.xlabel("Month")
                    plt.ylabel("Value")
                    plt.title(f"Seasonal cycle comparison for {pos}")
                    plt.legend()
                    plt.grid()
                    plt.show()
                    
                    #  Final display of the best c coefficients
                print("\n Best c coefficients found for each position:")
                for pos, c_value in optimal_c_values.items():
                    print(f"{pos}: c = {c_value:.6f}")
                
                X_GLOBCOASTS_CALIBRE = c_value * X_CS_TOTAL
                
                # In[13]:
                
                
                # POST TREATMENT OF THE RESULTS
                # ---------------------------------------------------------------------------------------
                # a. TEMPORAL SMOOTH OVER 3 MONTHS 
                # Define the sigma (standard deviation) for Gaussian smoothing
                # Assuming an approximate 30-day window, adjust sigma based on your data frequency
                
                sigma = 1.0  # This will smooth over approximately 3 months, adjust as needed
                
                X_TOTAL_detrend             = FUNC.gaussian_smooth(results_X_CS_TOTAL, sigma)
                X_Hydro_detrend             = FUNC.gaussian_smooth(results_X_CS_Hydro, sigma)
                X_MorphoTOT_detrend         = FUNC.gaussian_smooth(results_X_CS_MorphoTOT, sigma)
                X_MorphoXshore_detrend      = FUNC.gaussian_smooth(results_X_CS_MorphoXshore, sigma)
                X_MorphoLST_detrend         = FUNC.gaussian_smooth(results_X_CS_MorphoLST,sigma)
                
                X_val_detrend               = FUNC.gaussian_smooth(Xshores_val, sigma)
                
                dx_Hydro_detrend            = FUNC.gaussian_smooth(results_dx_CS_Hydro, sigma)
                dx_MorphoLST_detrend        = FUNC.gaussian_smooth(results_dx_CS_MorphoLST, sigma)
                dx_MorphoXshore_detrend     = FUNC.gaussian_smooth(results_dx_CS_MorphoXshore, sigma)
                dx_MorphoTOT_detrend        = FUNC.gaussian_smooth(results_dx_CS_MorphoTOT, sigma)
                dx_TOTAL_detrend            = FUNC.gaussian_smooth(results_dx_CS_TOTAL,sigma)
                
                SLA_detrend                 = FUNC.gaussian_smooth(SLA[:,results_index],sigma)
                DAC_detrend                 = FUNC.gaussian_smooth(DAC[:,results_index],sigma)
                SU_detrend                  = FUNC.gaussian_smooth(SU[:,results_index],sigma)
                DKAMP_detrend               = FUNC.gaussian_smooth(results_dKAMP,sigma)
                QrivD_detrend               = FUNC.gaussian_smooth(QrivD[:,results_index],sigma)
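                
                # NOTE: FUNC.gaussian_smooth is defined in function_GLOBCOASTS and is not shown here.
                # A minimal sketch of the same idea, assuming a 1-D Gaussian filter applied along the time axis:
                from scipy.ndimage import gaussian_filter1d
                X_TOTAL_detrend_sketch = gaussian_filter1d(results_X_CS_TOTAL, sigma=sigma, axis=0)   # illustrative only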
                
                
                # In[14]:
                
                
                # b. LINEAR DETREND
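                # detrend_linear is not defined in this notebook (it presumably lives in function_GLOBCOASTS).
                # A minimal stand-in removing a least-squares linear trend from a 1-D series
                # (equivalently, scipy.signal.detrend(x, type="linear")):
                def detrend_linear(x):
                    t = np.arange(len(x))
                    slope, intercept = np.polyfit(t, x, 1)   # straight-line least-squares fit
                    return x - (slope * t + intercept)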
                X_MorphoLST_detrend_linear    = np.apply_along_axis(detrend_linear,0,X_MorphoLST_detrend)
                X_Hydro_detrend_linear        = np.apply_along_axis(detrend_linear,0,X_Hydro_detrend)
                X_MorphoXshore_detrend_linear = np.apply_along_axis(detrend_linear,0,X_MorphoXshore_detrend)
                X_MorphoTOT_detrend_linear    = np.apply_along_axis(detrend_linear,0,X_MorphoTOT_detrend)
                X_TOTAL_detrend_linear        = np.apply_along_axis(detrend_linear,0,X_TOTAL_detrend)
                X_val_detrend_global          = np.apply_along_axis(detrend_linear,0,Xshores_val)
                
                
                dx_Hydro_detrend_linear       = np.apply_along_axis(detrend_linear,0,dx_Hydro_detrend)
                dx_MorphoLST_detrend_linear   = np.apply_along_axis(detrend_linear,0,dx_MorphoLST_detrend)
                dx_MorphoXshore_detrend_linear= np.apply_along_axis(detrend_linear,0,dx_MorphoXshore_detrend)
                dx_MorphoTOT_detrend_linear   = np.apply_along_axis(detrend_linear,0,dx_MorphoTOT_detrend)
                dx_TOTAL_detrend_linear       = np.apply_along_axis(detrend_linear,0,dx_TOTAL_detrend)
                
                
                SLA_detrend_linear            = np.apply_along_axis(detrend_linear,0,SLA_detrend)
                DAC_detrend_linear            = np.apply_along_axis(detrend_linear,0,DAC_detrend)
                SU_detrend_linear             = np.apply_along_axis(detrend_linear,0,SU_detrend)
                DKAMP_detrend_linear          = np.apply_along_axis(detrend_linear,0,DKAMP_detrend)
                QrivD_detrend_linear          = np.apply_along_axis(detrend_linear,0,QrivD_detrend)
                
                