-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathTelemac3D_to_Xarray.py
209 lines (194 loc) · 9.77 KB
/
Telemac3D_to_Xarray.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
import numpy as np
from datetime import datetime, timedelta
import sys, os, logging
from dask.array import from_array
logger = logging.getLogger(__name__)
import xarray as xr
import zarr
try:
    # Preferred path: the Telemac python scripts are already on sys.path.
    from data_manip.formats.selafin import Selafin
except ImportError:
    # Activating PYTEL: locate the Telemac python scripts under $HOMETEL,
    # which a standard Telemac installation defines, and retry the import.
    try:
        pytel = os.path.join(os.environ['HOMETEL'], 'scripts', 'python3')
        if pytel not in sys.path:
            logger.warning('adding telemac python scripts to PYTHONPATH: %s' %
                           pytel)
            sys.path.append(pytel)
        from data_manip.formats.selafin import Selafin
    except (KeyError, ImportError):
        # KeyError: HOMETEL is not set; ImportError: scripts not found there.
        logger.error(
            'Telemac python scripts cannot be found. These are distributed together with the Telemac source code. This reader will not work.'
        )
class T3D_Xr:
    """Convert a Telemac 3D Selafin result file to a CF/UGRID-annotated
    xarray Dataset and stream it, variable by variable, into a zarr store.

    Parameters
    ----------
    filename : str
        Path to the Selafin (.slf) result file.
    store : str, optional
        Path of the zarr store to write; defaults to ``filename + '.zarr'``.
    name : str, optional
        Human-readable dataset name; defaults to ``filename``.
    start_time : datetime.datetime, optional
        Override for the start time stored in the Selafin header
        (Selafin files may carry a wrong start time due to a formatting
        error in Telemac).
    """

    def __init__(self, filename=None, store=None, name=None, start_time=None):
        # , n_workers=8, threads=2, memory_limit='1GB'):

        def vardic(vars_slf):
            """
            Match the selafin variables from Telemac 3D to the variables used
            in OpenDrift.

            Returns a pair of numpy arrays: the matched OpenDrift standard
            names and the corresponding indices into ``vars_slf``.
            """
            # Known Telemac 3D -> OpenDrift equivalences, to the best of our
            # knowledge.  Keys keep the fixed-width padding used in Selafin
            # headers.
            Vars_OD = {
                'VELOCITY U ': 'x_sea_water_velocity',
                'VELOCITY V ': 'y_sea_water_velocity',
                'VELOCITY W ': 'upward_sea_water_velocity',
                'TURBULENT ENERGY': 'turbulent_kinetic_energy',
                'TEMPERATURE ': 'sea_water_temperature',
                'SALINITY ': 'sea_water_salinity',
                'NUZ FOR VELOCITY': 'ocean_vertical_diffusivity',
                'NUX FOR VELOCITY': 'horizontal_diffusivity',
                'ELEVATION Z ': 'altitude',
            }
            # OpenDrift variables with no Telemac 3D equivalent (reference
            # only; not used below).
            No_OD_equiv = {
                'x_wind', 'y_wind', 'wind_speed',
                'sea_floor_depth_below_sea_level', 'wind_from_direction',
                'sea_ice_x_velocity', 'sea_ice_y_velocity',
                'sea_surface_wave_significant_height',
                'sea_surface_wave_stokes_drift_x_velocity',
                'sea_surface_wave_stokes_drift_y_velocity',
                'sea_surface_wave_period_at_variance_spectral_density_maximum',
                'sea_surface_wave_mean_period_from_variance_spectral_density_second_frequency_moment',
                'sea_ice_area_fraction', 'surface_downward_x_stress',
                'surface_downward_y_stress', 'turbulent_generic_length_scale'
            }
            # Telemac 3D variables with no OpenDrift equivalent (reference
            # only; not used below).
            No_Telemac_equiv = {
                'NUY FOR VELOCITY',
                'DISSIPATION ',
            }
            variables = []
            var_idx = []
            for i, var in enumerate(vars_slf):
                try:
                    variables.append(Vars_OD[var])
                    var_idx.append(i)
                except KeyError:
                    # was a bare ``except:`` — only a missing mapping key is
                    # expected here
                    logger.info(
                        "Selafin variable {} has no equivalent in OpenDrift".
                        format(var))
            return np.array(variables), np.array(var_idx)

        def create_array():
            """Return an empty Dataset carrying the global CF attributes."""
            return xr.Dataset(
                attrs={
                    'Conventions': 'CF-1.6',
                    'standard_name_vocabulary': 'CF-1.6',
                    'history': 'Created ' + str(datetime.now()),
                    'source': 'Telemac 3D',
                    'CoordinateSystem': "Cartesian"
                },)

        # client = Client(n_workers=n_workers, threads_per_worker=threads,
        #                 memory_limit=memory_limit)
        # BUG FIX: the fallback previously referenced an undefined name
        # ``infile`` (NameError whenever ``name`` was omitted).
        self.name = name if name is not None else filename
        # logger.info('Opening dataset: %s' % filename)
        self.slf = Selafin(filename)
        self.ds = create_array()
        self.ds.attrs['title'] = self.slf.title
        self.ds.attrs['original file'] = self.slf.file['name']
        # U-grid conventions: dummy variable holding the mesh topology
        self.ds["Mesh2D"] = 2
        self.ds["Mesh2D"].attrs = {
            'cf_role': "mesh_topology",
            'long_name': "Topology data of 2D unstructured mesh",
            'topology_dimension': 2,
            'node_coordinates': ['node_x', 'node_y'],
            'face_node_connectivity': "face_nodes",
            'face_dimension': 'face',
        }
        # CF conventions for projections
        self.ds['crs'] = 0
        self.ds['crs'].attrs = {
            'grid_mapping_name': "WGS 84 / UTM zone 30N",
            'semi_major_axis': 6378137.0,  # WGS84
            'inverse_flattening': 298.25722356,  # WGS84
            'scale_factor_at_central_meridian': 0.9996,
            'longitude_of_central_meridian': 3,
            'latitude_of_projection_origin': 0,
            'false_easting': 500000,
            'false_northing': 0}
        ### time management
        # Slf files have a wrong start time due to a formating error in
        # Telemac, so the caller may override it.
        if start_time is not None:
            if isinstance(start_time, datetime):
                self.start_time = start_time
            else:
                logger.warning(
                    "The specified start time is not a datetime object")
                # BUG FIX: previously self.start_time was left unset here,
                # crashing with AttributeError below; fall back to the file
                # header instead.
                self.start_time = datetime(*self.slf.datetime[:5])
        else:
            logger.info("loading the datetime from the selafin file")
            # header stores (year, month, day, hour, minute)
            self.start_time = datetime(*self.slf.datetime[:5])
        # Absolute timestamps: header start time + per-frame offsets (s)
        self.times = [self.start_time + timedelta(seconds=s)
                      for s in self.slf.tags['times']]
        self.ds.coords['time'] = ('time', self.times, {
            'standard_name': 'time',
            'long_name': 'time',
            'axis': 'T',
        })
        # for OpenDrift
        self.ds.attrs['proj4'] = '+proj=utm +zone=30 +datum=WGS84 +units=m +no_defs '
        # Building Coordinates
        self.ds.coords['node_x'] = (['node'], self.slf.meshx, {
            'units': 'm',
            'standard_name': 'projection_x_coordinate',
            'axis': 'X',
            'grid_mapping': 'crs'
        })
        self.ds.coords['node_y'] = (['node'], self.slf.meshy, {
            'units': 'm',
            'standard_name': 'projection_y_coordinate',
            'axis': 'Y',
            'grid_mapping': 'crs'
        })
        self.ds.coords['layer'] = ('layer', range(self.slf.nplan))
        # We don't know if the indexing is anti-clockwise, will trust T3D...
        self.ds['face_nodes'] = (['face', 'max_face_nodes'],
                                 self.slf.ikle2, {
            'cf_role': "face_node_connectivity",
            'long_name': "Maps every triangular face to its three corner nodes",
            'start_index': 0})
        self.ds['volume_node_connectivity'] = (['cell', 'max_cell_node'],
                                               self.slf.ikle3, {
            'cf_role': "volume_node_connectivity",
            'long_name': "Maps every volume to its corner nodes.",
            'start_index': 0})
        if store is None:  # was ``store == None``
            store = filename + '.zarr'
        # write zarr skeleton before populating variables
        self.ds.to_zarr(store, mode='a')
        # BUG FIX: report the actual store path, not filename+'.zarr'
        print(datetime.now().strftime('Time %H-%M: ') +
              'Zarr archive {} has been populated with dimensions and CF convention parameters'.format(store))
        # initialise the variables: append each one to the store in turn to
        # bound peak memory at a single (time, layer, node) buffer
        self.variables, self.var_idx = vardic(self.slf.varnames)
        for i in range(len(self.var_idx)):
            print(datetime.now().strftime('Time %H-%M: ') +
                  'Processing variable {}'.format(self.variables[i]))
            # Selafin only supports per-timestep reads, so fill the full
            # buffer frame by frame (dask from_array was too heavy here).
            buff = np.zeros((len(self.times), self.slf.nplan,
                             self.slf.npoin2))
            unit = self.slf.varunits[self.var_idx[i]].strip()
            for t in range(len(self.times)):
                buff[t] = self.slf.get_variables_at(t, [self.var_idx[i]]). \
                    reshape((self.slf.nplan, self.slf.npoin2))
            ds2 = xr.Dataset({self.variables[i]: (('time', 'layer', 'node'),
                                                  buff,
                                                  {'units': unit,
                                                   'standard_name': self.variables[i]})})
            print(datetime.now().strftime('Time %H-%M: ') +
                  '\tVariable processed, writing zarr path...')
            # self.ds = xr.merge([self.ds, ds2])
            ds2.to_zarr(store, mode='a')
            print(datetime.now().strftime('Time %H-%M: ') +
                  '\tVariable written to zarr')
            ds2.close()
        print(datetime.now().strftime('Time %H-%M: ') +
              'All variable written, consolidating metadata')
        # single consolidated .zmetadata key speeds up remote/cloud opens
        zarr.consolidate_metadata(store)
        print(datetime.now().strftime('Time %H-%M: ') + store +
              ' ready to be uploaded on a cloud.')

    def write_array(self, filename, force_overwrite=False):
        """Write the in-memory Dataset to a zarr store at ``filename``.

        Parameters
        ----------
        filename : str
            Target zarr store path.
        force_overwrite : bool, optional
            When True overwrite an existing store (mode 'w'); otherwise
            fail if the store already exists (mode 'w-').
        """
        # logger.info('Writting Xarray dataset to zarr')
        mode = 'w' if force_overwrite else 'w-'
        self.ds.to_zarr(filename, mode=mode, consolidated=True)
        # logger.info('zarr store completed')