In [1]:
import numpy as np
import pandas as pd
from pytides2.tide import Tide
import xarray as xr
In [2]:
from dask.distributed import Client, LocalCluster
cluster = LocalCluster(
    n_workers=128,          # one single-threaded worker process per core
    threads_per_worker=1,   # HDF5 reads through xarray behave best single-threaded
    memory_limit="2GB",     # per-worker cap, or ~ (503/256) GiB
)
client = Client(cluster)
client
Out[2]:

Client: Client-fe3ab678-8165-11f0-8569-000000befe80
Connection method: Cluster object (cluster type: distributed.LocalCluster)
Dashboard: http://127.0.0.1:8787/status
Cluster: LocalCluster 74a737ed (status: running, using processes: True)
Workers: 128, total threads: 128, total memory: 238.42 GiB
(Per-worker details elided: each worker runs 1 thread with a 1.86 GiB memory limit and a local scratch directory under /tmp/dask-scratch-space/.)
2025-08-25 06:05:39,725 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle fb588d4ed6c43ea1a205da0bba08d190 initialized by task ('rechunk-merge-rechunk-transfer-e2334a18789ec7046a7b7ca5a411b693', 0, 0, 87, 0) executed on worker tcp://127.0.0.1:43613
2025-08-25 07:44:33,300 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle fb588d4ed6c43ea1a205da0bba08d190 deactivated due to stimulus 'task-finished-1756100673.2888749'
2025-08-25 07:44:35,185 - distributed.nanny.memory - WARNING - Worker tcp://127.0.0.1:36075 (pid=17985) exceeded 95% memory budget. Restarting...
2025-08-25 07:44:35,332 - distributed.scheduler - WARNING - Removing worker 'tcp://127.0.0.1:36075' caused the cluster to lose already computed task(s), which will be recomputed elsewhere: {('analyze_block-adfc206b715e7091324cca25aa48e84f', 707, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 697, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 699, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 709, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 703, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 715, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 708, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 694, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 717, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 695, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 711, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 705, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 704, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 701, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 714, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 702, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 713, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 716, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 700, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 710, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 698, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 712, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 706, 0, 0), ('analyze_block-adfc206b715e7091324cca25aa48e84f', 696, 0, 0)} (stimulus_id='handle-worker-cleanup-1756100675.328182')
2025-08-25 07:44:35,542 - distributed.nanny - WARNING - Restarting worker
2025-08-25 07:44:37,505 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle fb588d4ed6c43ea1a205da0bba08d190 initialized by task ('rechunk-merge-rechunk-transfer-e2334a18789ec7046a7b7ca5a411b693', 0, 0, 102, 0) executed on worker tcp://127.0.0.1:43613
2025-08-25 08:35:18,293 - distributed.shuffle._scheduler_plugin - WARNING - Shuffle fb588d4ed6c43ea1a205da0bba08d190 deactivated due to stimulus 'task-finished-1756103718.2661896'
In [3]:
def pytides_to_df(pytides_tide: Tide) -> pd.DataFrame:
    constituent_names = [c.name.upper() for c in pytides_tide.model["constituent"]]
    return pd.DataFrame(pytides_tide.model, index=constituent_names).drop(
        "constituent",
        axis=1,
    )

def pytide_get_coefs(ts: pd.Series, resample: int | None = None) -> Tide:
    if resample is not None:
        ts = ts.resample(f"{resample}min").mean()
        ts = ts.shift(freq=f"{resample / 2}min")  # Center the resampled points
    ts = ts.dropna()
    return Tide.decompose(ts.values, ts.index.to_pydatetime())[0]

def reduce_coef_to_fes(df: pd.DataFrame, cnst: list, verbose: bool = False) -> pd.DataFrame:
    res = pd.DataFrame(0.0, index=cnst, columns=df.columns)
    common_constituents = df.index.intersection(cnst)
    res.loc[common_constituents] = df.loc[common_constituents]

    not_in_fes_df = df[~df.index.isin(cnst)]
    not_in_fes = not_in_fes_df.index.tolist()
    not_in_fes_amps = not_in_fes_df["amplitude"].round(3).tolist()
    missing_fes = set(cnst) - set(df.index)

    if verbose:
        print(f"Constituents found but not in FES: {not_in_fes}")
        print(f"Their amplitudes: {not_in_fes_amps}")
        if missing_fes:
            print(
                f"FES constituents missing from analysis (set to 0): {sorted(missing_fes)}",
            )

    return res
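
A quick sanity check on these helpers (hypothetical, not part of the original run): feed them a short synthetic hourly series containing a single M2-like oscillation and confirm the recovered M2 amplitude. The series, period value, and constituent subset below are illustrative only.

# hypothetical check: synthetic series, small constituent subset
t = pd.date_range("2022-01-01", periods=24 * 180, freq="h")
m2_period_h = 12.4206012  # approximate M2 period in hours
eta = pd.Series(0.5 * np.cos(2 * np.pi * np.arange(t.size) / m2_period_h), index=t)
check = reduce_coef_to_fes(
    pytides_to_df(pytide_get_coefs(eta)),
    cnst=["M2", "S2", "K1", "O1"],
    verbose=True,
)
check.loc["M2", "amplitude"]  # expected to come out near 0.5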
In [4]:
from natsort import natsorted
import glob

files = natsorted(glob.glob("/project/home/p200764/schism_runs/validation/schism3d/run4/20220*/outputs/out2d_*.nc"))
files[-5:]
Out[4]:
['/project/home/p200764/schism_runs/validation/schism3d/run4/202204/outputs/out2d_176.nc',
 '/project/home/p200764/schism_runs/validation/schism3d/run4/202204/outputs/out2d_177.nc',
 '/project/home/p200764/schism_runs/validation/schism3d/run4/202204/outputs/out2d_178.nc',
 '/project/home/p200764/schism_runs/validation/schism3d/run4/202204/outputs/out2d_179.nc',
 '/project/home/p200764/schism_runs/validation/schism3d/run4/202204/outputs/out2d_180.nc']
In [5]:
ds = xr.open_mfdataset(
    files,
    concat_dim="time",
    combine="nested",
    parallel=True
)
# ds = ds.chunk({"time": -1, "nSCHISM_hgrid_node": 5000})  # now safe for Dask
ds
Out[5]:
<xarray.Dataset> Size: 1TB
Dimensions:                  (time: 2880, one: 1, nSCHISM_hgrid_node: 2960823,
                              nSCHISM_hgrid_face: 5702272,
                              nMaxSCHISM_hgrid_face_nodes: 4,
                              nSCHISM_hgrid_edge: 8664341, two: 2)
Coordinates:
  * time                     (time) datetime64[ns] 23kB 2021-12-30T01:00:00 ....
    SCHISM_hgrid_node_x      (nSCHISM_hgrid_node) float64 24MB dask.array<chunksize=(2960823,), meta=np.ndarray>
    SCHISM_hgrid_node_y      (nSCHISM_hgrid_node) float64 24MB dask.array<chunksize=(2960823,), meta=np.ndarray>
    SCHISM_hgrid_face_x      (nSCHISM_hgrid_face) float64 46MB dask.array<chunksize=(5702272,), meta=np.ndarray>
    SCHISM_hgrid_face_y      (nSCHISM_hgrid_face) float64 46MB dask.array<chunksize=(5702272,), meta=np.ndarray>
    SCHISM_hgrid_edge_x      (nSCHISM_hgrid_edge) float64 69MB dask.array<chunksize=(8664341,), meta=np.ndarray>
    SCHISM_hgrid_edge_y      (nSCHISM_hgrid_edge) float64 69MB dask.array<chunksize=(8664341,), meta=np.ndarray>
Dimensions without coordinates: one, nSCHISM_hgrid_node, nSCHISM_hgrid_face,
                                nMaxSCHISM_hgrid_face_nodes,
                                nSCHISM_hgrid_edge, two
Data variables:
    minimum_depth            (time, one) float64 23kB dask.array<chunksize=(4, 1), meta=np.ndarray>
    SCHISM_hgrid             (time, one) |S1 3kB dask.array<chunksize=(4, 1), meta=np.ndarray>
    crs                      (time, one) int32 12kB dask.array<chunksize=(4, 1), meta=np.ndarray>
    depth                    (time, nSCHISM_hgrid_node) float32 34GB dask.array<chunksize=(4, 2960823), meta=np.ndarray>
    bottom_index_node        (time, nSCHISM_hgrid_node) int32 34GB dask.array<chunksize=(4, 2960823), meta=np.ndarray>
    SCHISM_hgrid_face_nodes  (time, nSCHISM_hgrid_face, nMaxSCHISM_hgrid_face_nodes) float64 526GB dask.array<chunksize=(4, 5702272, 4), meta=np.ndarray>
    SCHISM_hgrid_edge_nodes  (time, nSCHISM_hgrid_edge, two) float64 399GB dask.array<chunksize=(4, 8664341, 2), meta=np.ndarray>
    dryFlagNode              (time, nSCHISM_hgrid_node) float32 34GB dask.array<chunksize=(1, 2960823), meta=np.ndarray>
    elevation                (time, nSCHISM_hgrid_node) float32 34GB dask.array<chunksize=(1, 2960823), meta=np.ndarray>
    dryFlagElement           (time, nSCHISM_hgrid_face) float32 66GB dask.array<chunksize=(1, 2851136), meta=np.ndarray>
    dryFlagSide              (time, nSCHISM_hgrid_edge) float32 100GB dask.array<chunksize=(1, 2888114), meta=np.ndarray>
In [6]:
elev = ds.elevation.chunk({"time": -1, "nSCHISM_hgrid_node": 500})
elev
Out[6]:
<xarray.DataArray 'elevation' (time: 2880, nSCHISM_hgrid_node: 1480411)> Size: 17GB
dask.array<rechunk-p2p, shape=(2880, 1480411), dtype=float32, chunksize=(2880, 500), chunktype=numpy.ndarray>
Coordinates:
  * time                 (time) datetime64[ns] 23kB 2021-12-30T01:00:00 ... 2...
    SCHISM_hgrid_node_x  (nSCHISM_hgrid_node) float64 12MB dask.array<chunksize=(500,), meta=np.ndarray>
    SCHISM_hgrid_node_y  (nSCHISM_hgrid_node) float64 12MB dask.array<chunksize=(500,), meta=np.ndarray>
Dimensions without coordinates: nSCHISM_hgrid_node
Attributes:
    i23d:          1
    location:      node
    grid_mapping:  crs
    mesh:          SCHISM_hgrid
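
Each node's harmonic fit needs its full time series in a single chunk, which the rechunk above ({"time": -1}) guarantees; a quick assertion (not in the original notebook) makes that explicit:

# elev.chunks is ((2880,), (500, 500, ..., 411)): one block along time, many along nodes
assert len(elev.chunks[0]) == 1, "time must not be split across chunks"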
In [7]:
import dask.array as da
FULL = [
    "M2", "S2", "N2", "K2", "2N2", "L2", "T2", "R2", "NU2", "MU2", "EPS2", "LAMBDA2",  # Semi-diurnal (twice daily)
    "K1", "O1", "P1", "Q1", "J1", "S1",  # Diurnal (once daily)
    "MF", "MM", "MSF", "SA", "SSA", "MSQM", "MTM",  # Long period (fortnightly to annual)
    "M4", "MS4", "M6", "MN4", "N4", "S4", "M8", "M3", "MKS2",  # Short period (higher harmonics)
]

metrics = ["amplitude", "phase"]

def analyze_node(ts_np: np.ndarray, time_index: pd.DatetimeIndex) -> np.ndarray:
    ts = pd.Series(ts_np, index=time_index, name="elev")
    df = pytides_to_df(pytide_get_coefs(ts, 60))
    df = reduce_coef_to_fes(df, cnst=FULL)
    # df = df.loc[FULL, metrics]  # enforce row/column order if needed
    return df.to_numpy()

def analyze_block(block: np.ndarray, time_block: np.ndarray) -> np.ndarray:
    time_index = pd.DatetimeIndex(time_block)
    return np.stack(
        [analyze_node(block[:, i], time_index) for i in range(block.shape[1])],
        axis=0
    )

nconst = len(FULL)
nmetrics = len(metrics)

results = da.map_blocks(
    analyze_block,
    elev.data,            # (Nt, Nnodes)
    elev["time"],
    dtype=float,
    drop_axis=0,          # drop time axis
    new_axis=[1, 2],      # add constituent, metric axes
    chunks=(elev.chunks[1], nconst, nmetrics)
)
results
Out[7]:
Array: 768.04 MiB, shape (1480411, 34, 2), dtype float64 (numpy.ndarray chunks)
Chunk: 265.62 kiB, shape (500, 34, 2); dask graph: 2961 chunks in 1445 graph layers
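
Before dispatching the full graph, it can be worth evaluating one tiny block eagerly to catch per-node failures early; a hypothetical smoke test on the first few nodes:

sample = elev.isel(nSCHISM_hgrid_node=slice(0, 3)).compute()  # (2880, 3), small enough to load
out = analyze_block(sample.values, sample["time"].values)
out.shape  # expected (3, 34, 2): (nodes, constituents, metrics)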
In [8]:
coef_da = xr.DataArray(
    results,
    dims=("nSCHISM_hgrid_node", "constituent", "metric"),
    coords={
        "nSCHISM_hgrid_node": elev.nSCHISM_hgrid_node,
        "constituent": FULL,
        "metric": metrics,
        "lon": elev.SCHISM_hgrid_node_x,
        "lat": elev.SCHISM_hgrid_node_y,
    },
    name="tidal_coefs"
)
coef_da
Out[8]:
<xarray.DataArray 'tidal_coefs' (nSCHISM_hgrid_node: 1480411, constituent: 34,
                                 metric: 2)> Size: 805MB
dask.array<analyze_block, shape=(1480411, 34, 2), dtype=float64, chunksize=(500, 34, 2), chunktype=numpy.ndarray>
Coordinates:
  * nSCHISM_hgrid_node  (nSCHISM_hgrid_node) int64 12MB 0 1 ... 1480409 1480410
  * constituent         (constituent) <U7 952B 'M2' 'S2' 'N2' ... 'M3' 'MKS2'
  * metric              (metric) <U9 72B 'amplitude' 'phase'
    lon                 (nSCHISM_hgrid_node) float64 12MB dask.array<chunksize=(500,), meta=np.ndarray>
    lat                 (nSCHISM_hgrid_node) float64 12MB dask.array<chunksize=(500,), meta=np.ndarray>
In [9]:
from dask.diagnostics import ProgressBar  # note: only reports for local schedulers; with a distributed Client, dask.distributed.progress is the equivalent
import warnings
warnings.filterwarnings("ignore")

with ProgressBar():
    coef_result = coef_da.compute()
2025-08-25 07:44:35,066 - distributed.worker.memory - WARNING - Worker is at 83% memory usage. Pausing worker.  Process memory: 1.56 GiB -- Worker memory limit: 1.86 GiB
2025-08-25 08:35:20,083 - distributed.worker.memory - WARNING - Worker is at 81% memory usage. Pausing worker.  Process memory: 1.51 GiB -- Worker memory limit: 1.86 GiB
2025-08-25 08:35:21,141 - distributed.worker.memory - WARNING - Worker is at 63% memory usage. Resuming worker. Process memory: 1.19 GiB -- Worker memory limit: 1.86 GiB
In [10]:
coef_ds = coef_result.to_dataset(dim="metric")
In [11]:
coef_ds.to_netcdf("run4_tides.nc")
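
The saved file can later be reopened and queried per constituent without recomputing; a hypothetical reload:

reloaded = xr.open_dataset("run4_tides.nc")
reloaded["amplitude"].sel(constituent="M2").max().item()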
In [12]:
import hvplot.xarray
In [13]:
m2 = coef_ds.sel(constituent = "M2")
m2
Out[13]:
<xarray.Dataset> Size: 59MB
Dimensions:             (nSCHISM_hgrid_node: 1480411)
Coordinates:
  * nSCHISM_hgrid_node  (nSCHISM_hgrid_node) int64 12MB 0 1 ... 1480409 1480410
    constituent         <U7 28B 'M2'
    lon                 (nSCHISM_hgrid_node) float64 12MB 179.1 178.8 ... -65.44
    lat                 (nSCHISM_hgrid_node) float64 12MB 69.3 69.4 ... 85.8
Data variables:
    amplitude           (nSCHISM_hgrid_node) float64 12MB 0.1405 ... 0.04601
    phase               (nSCHISM_hgrid_node) float64 12MB 58.31 56.19 ... 94.66
In [15]:
# m2.hvplot.scatter(x='lon', y="lat",c="amplitude" )
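
With roughly 1.5 million nodes a raw scatter is heavy in the browser; if datashader is installed, hvplot's rasterize option can aggregate the points before rendering (an optional variant, not executed here):

# m2.hvplot.scatter(x="lon", y="lat", c="amplitude", rasterize=True, cmap="viridis")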