preprocess.py - pism - [fork] customized build of PISM, the parallel ice sheet model (tillflux branch)
(HTM) git clone git://src.adamsgaard.dk/pism
(DIR) Log
(DIR) Files
(DIR) Refs
(DIR) LICENSE
---
preprocess.py (8847B)
---
#!/usr/bin/env python3
from sys import exit

# Import all necessary modules here so that if it fails, it fails early.
try:
    import netCDF4 as NC
except ImportError:
    print("netCDF4 is not installed!")
    exit(1)

import subprocess
import numpy as np
import os

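# NetCDF variable names for surface mass balance and ice surface temperature;
# these are the names read by PISM's "given" surface (climate) forcing.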
smb_name = "climatic_mass_balance"
temp_name = "ice_surface_temp"


def run(commands):
    """Run a list of commands (or one command given as a string)."""
    if isinstance(commands, (list, tuple)):
        for cmd in commands:
            print("Running '%s'..." % cmd)
            subprocess.call(cmd.split(' '))
    else:
        run([commands])

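# Note: each command string is split on single spaces before being handed to
# subprocess.call, so arguments that themselves contain spaces are not
# supported by this helper; e.g. run("unzip -n ALBMAPv1.nc.zip") runs one
# command, run(["cmd one", "cmd two"]) runs several in sequence.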

def preprocess_ice_velocity():
    """
    Preprocess the ~332 Mb Antarctic ice velocity dataset from the NASA MEaSUREs
    project (NSIDC-0484); the dataset has to be downloaded manually, see
    http://nsidc.org/data/nsidc-0484.html
    """
    url = "ftp://n5eil01u.ecs.nsidc.org/SAN/MEASURES/NSIDC-0484.001/1996.01.01/"
    input_filename = "antarctica_ice_velocity_900m.nc"
    output_filename = os.path.splitext(input_filename)[0] + "_cutout.nc"

    commands = ["ncrename -d nx,x -d ny,y -O %s %s" % (input_filename, input_filename)]

    if not os.path.exists(input_filename):
        print("Please download the InSAR velocity dataset from http://nsidc.org/data/nsidc-0484.html")
        print("Version 1 (900m spacing) can be found here: https://n5eil01u.ecs.nsidc.org/MEASURES/NSIDC-0484.001/")
        exit(1)

    run(commands)

    nc = NC.Dataset(input_filename, 'a')

    # Create x and y coordinate variables and set projection parameters; cut
    # out the Ross area.

    # Metadata provided with the dataset describes the *full* grid, so it is a
    # lot easier to modify this file instead of adding grid information to the
    # "cutout" file.
    if 'x' not in nc.variables and 'y' not in nc.variables:
        nx = nc.nx
        ny = nc.ny
        # the grid is assumed to be square (x and y spanning the same range),
        # so only xmin and ymax are read from the metadata
        x_min = float(nc.xmin.strip().split(' ')[0])
        y_max = float(nc.ymax.strip().split(' ')[0])
        x_max = y_max
        y_min = x_min

        x = np.linspace(x_min, x_max, nx)
        y = np.linspace(y_max, y_min, ny)

        nc.projection = "+proj=stere +ellps=WGS84 +datum=WGS84 +lon_0=0 +lat_0=-90 +lat_ts=-71 +units=m"

        try:
            x_var = nc.createVariable('x', 'f8', ('x',))
            y_var = nc.createVariable('y', 'f8', ('y',))
        except Exception:
            # the coordinate variables already exist (e.g. on a re-run)
            x_var = nc.variables['x']
            y_var = nc.variables['y']

        x_var[:] = x
        y_var[:] = y

        x_var.units = "meters"
        x_var.standard_name = "projection_x_coordinate"

        y_var.units = "meters"
        y_var.standard_name = "projection_y_coordinate"

    nc.close()

    if not os.path.exists(output_filename):
        # modify this command to cut out a different region
        cmd = "ncks -d x,2200,3700 -d y,3500,4700 -O %s %s" % (input_filename, output_filename)
        run(cmd)

    nc = NC.Dataset(output_filename, 'a')

    # fix units of 'vx' and 'vy'
    nc.variables['vx'].units = "m / year"
    nc.variables['vy'].units = "m / year"

    # Compute and save the velocity magnitude
    if 'v_magnitude' not in nc.variables:
        vx = nc.variables['vx'][:]
        vy = nc.variables['vy'][:]

        v_magnitude = np.sqrt(vx ** 2 + vy ** 2)

        magnitude = nc.createVariable('v_magnitude', 'f8', ('y', 'x'))
        magnitude.units = "m / year"

        magnitude[:] = v_magnitude

    nc.close()

    return output_filename


def preprocess_albmap():
    """
    Download and preprocess the ~16 Mb ALBMAP dataset from http://doi.pangaea.de/10.1594/PANGAEA.734145
    """
    url = "http://store.pangaea.de/Publications/LeBrocq_et_al_2010/ALBMAPv1.nc.zip"
    input_filename = "ALBMAPv1.nc"
    output_filename = os.path.splitext(input_filename)[0] + "_cutout.nc"

    commands = ["wget -nc %s" % url,                     # download
                "unzip -n %s.zip" % input_filename,      # unpack
                # modify this command to cut out a different region
                "ncks -O -d x1,439,649 -d y1,250,460 %s %s" % (input_filename, output_filename),  # cut out
                "ncks -O -v usrf,lsrf,topg,temp,acca,mask %s %s" % (output_filename, output_filename),  # trim
                "ncrename -O -d x1,x -d y1,y -v x1,x -v y1,y %s" % output_filename,  # fix metadata
                "ncrename -O -v temp,%s -v acca,%s %s" % (temp_name, smb_name, output_filename)]

    run(commands)

    nc = NC.Dataset(output_filename, 'a')

    # fix acab
    rho_ice = 910.0  # kg m-3
    acab = nc.variables[smb_name]
    acab.standard_name = "land_ice_surface_specific_mass_balance_flux"
    SMB = acab[:]
    SMB[SMB == -9999] = 0
    # convert from m/year to kg m-2 / year:
    acab[:] = SMB * rho_ice
    acab.units = "kg m-2 / year"
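    # note: the conversion above assumes the accumulation is given in metres of
    # ice equivalent per year, so e.g. 0.3 m/year becomes 0.3 * 910 = 273 kg m-2 / year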

    # fix artm and topg
    nc.variables[temp_name].units = "Celsius"
    nc.variables["topg"].standard_name = "bedrock_altitude"

    # compute ice thickness
    if 'thk' not in nc.variables:
        usrf = nc.variables['usrf'][:]
        lsrf = nc.variables['lsrf'][:]

        thk = nc.createVariable('thk', 'f8', ('y', 'x'))
        thk.units = "meters"
        thk.standard_name = "land_ice_thickness"

        thk[:] = usrf - lsrf

    nc.projection = "+proj=stere +ellps=WGS84 +datum=WGS84 +lon_0=0 +lat_0=-90 +lat_ts=-71 +units=m"
    nc.close()

    # Remove usrf and lsrf variables:
    command = "ncks -x -v usrf,lsrf -O %s %s" % (output_filename, output_filename)
    run(command)

    return output_filename


def final_corrections(filename):
    """
    * replaces missing values with zeros
    * computes Dirichlet B.C. locations
    """
    nc = NC.Dataset(filename, 'a')

    # replace missing values with zeros
    for var in ['u_ssa_bc', 'v_ssa_bc', 'magnitude']:
        tmp = nc.variables[var][:]
        tmp[tmp.mask == True] = 0
        nc.variables[var][:] = tmp

    thk = nc.variables['thk'][:]
    topg = nc.variables['topg'][:]

    # compute the grounded/floating mask:
    mask = np.zeros(thk.shape, dtype='i')

    def is_grounded(thickness, bed):
        rho_ice = 910.0
        rho_seawater = 1028.0
        return bed + thickness > 0 + (1 - rho_ice / rho_seawater) * thickness

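    # Flotation criterion: with sea level at z = 0, ice of thickness H over bed
    # elevation b is grounded when b + (rho_ice / rho_seawater) * H > 0, i.e.
    # when the bed lies above the depth at which the ice column would float;
    # the inequality above is this condition rewritten as
    # b + H > (1 - rho_ice / rho_seawater) * H.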
    grounded_icy = 0
    grounded_ice_free = 1
    ocean_icy = 2
    ocean_ice_free = 3

    My, Mx = thk.shape
    for j in range(My):
        for i in range(Mx):
            if is_grounded(thk[j, i], topg[j, i]):
                if thk[j, i] > 1.0:
                    mask[j, i] = grounded_icy
                else:
                    mask[j, i] = grounded_ice_free
            else:
                if thk[j, i] > 1.0:
                    mask[j, i] = ocean_icy
                else:
                    mask[j, i] = ocean_ice_free

    # compute the B.C. locations:
    bc_mask = np.logical_or(mask == grounded_icy, mask == grounded_ice_free)

    # mark ocean_icy cells next to grounded_icy ones too:
    row = np.array([0, -1, 1, 0])   # 4-neighbour (von Neumann) stencil offsets
    col = np.array([-1, 0, 0, 1])
    for j in range(1, My - 1):
        for i in range(1, Mx - 1):
            nearest = mask[j + row, i + col]

            if mask[j, i] == ocean_icy and np.any(nearest == grounded_icy):
                bc_mask[j, i] = 1

    # Do not prescribe SSA Dirichlet B.C. in ice-free ocean areas:
    bc_mask[thk < 1.0] = 0

    # modifications for the prognostic run: deepen the bed under floating ice
    # to avoid grounding in the ice-shelf interior and keep the results
    # comparable to the diagnostic flow field
    topg[np.logical_or(mask == ocean_icy, mask == ocean_ice_free)] = -2000.0

    # cap temperatures at -20 degrees Celsius (mostly affects cells out in the ocean):
    temperature = nc.variables[temp_name][:]
    temperature[temperature > -20.0] = -20.0

    nc.variables[temp_name][:] = temperature
    nc.variables['topg'][:] = topg
    bc_mask_var = nc.createVariable('bc_mask', 'i', ('y', 'x'))
    bc_mask_var[:] = bc_mask

    bad_bc_mask_mask = np.logical_and(thk < 1.0, bc_mask == 1)
    bad_bc_mask_var = nc.createVariable('bad_bc_mask', 'i', ('y', 'x'))
    bad_bc_mask_var[:] = bad_bc_mask_mask

    mask_var = nc.createVariable('mask', 'i', ('y', 'x'))
    mask_var[:] = mask

    nc.close()


if __name__ == "__main__":

    velocity = preprocess_ice_velocity()
    albmap = preprocess_albmap()
    albmap_velocity = os.path.splitext(albmap)[0] + "_velocity.nc"  # ice velocity on the ALBMAP grid
    output = "Ross_combined.nc"

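    # Merge the two datasets: nc2cdo.py (a PISM utility script) adds the
    # latitude/longitude information CDO needs, "cdo remapbil" bilinearly
    # regrids the velocities onto the ALBMAP grid, ncks assembles the combined
    # file, and ncrename gives the velocity components the names used for the
    # prescribed (Dirichlet) SSA boundary condition.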
    commands = ["nc2cdo.py %s" % velocity,
                "nc2cdo.py %s" % albmap,
                "cdo remapbil,%s %s %s" % (albmap, velocity, albmap_velocity),
                "ncks -x -v mask -O %s %s" % (albmap, output),
                "ncks -v vx,vy,v_magnitude -A %s %s" % (albmap_velocity, output),
                "ncrename -v vx,u_ssa_bc -v vy,v_ssa_bc -v v_magnitude,magnitude -O %s" % output]
    run(commands)

    final_corrections(output)