tpsg_3d_ch.py - pism - [fork] customized build of PISM, the parallel ice sheet model (tillflux branch)
(HTM) git clone git://src.adamsgaard.dk/pism
---
tpsg_3d_ch.py (14684B)
---
#!/usr/bin/env python3
# Copyright (C) 2016-17 Andy Aschwanden

import itertools
from collections import OrderedDict
import numpy as np
import os
import sys
import shlex
from os.path import join, abspath, realpath, dirname

try:
    import subprocess32 as sub
except ImportError:
    import subprocess as sub

from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter


def current_script_directory():
    import inspect
    filename = inspect.stack(0)[0][1]
    return realpath(dirname(filename))


script_directory = current_script_directory()

from resources import *


def map_dict(val, mdict):
    try:
        return mdict[val]
    except KeyError:
        return val


grid_choices = [40, 20, 10]

# set up the option parser
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.description = "Generating scripts to run Storglaciaren."
parser.add_argument("--restart_file", dest="restart_file",
                    help="Input file to restart from", default=None)
parser.add_argument("-n", '--n_procs', dest="n", type=int,
                    help='''number of cores/processors. default=4.''', default=4)
parser.add_argument("-w", '--wall_time', dest="walltime",
                    help='''walltime. default: 100:00:00.''', default="100:00:00")
parser.add_argument("-q", '--queue', dest="queue", choices=list_queues(),
                    help='''queue. default=debug.''', default='debug')
parser.add_argument("--exstep", dest="exstep",
                    help="Writing interval for spatial time series", default=1)
parser.add_argument("-f", "--o_format", dest="oformat",
                    choices=['netcdf3', 'netcdf4_parallel', 'pnetcdf'],
                    help="output format", default='netcdf3')
parser.add_argument("-g", "--grid", dest="grid", type=int,
                    choices=grid_choices,
                    help="horizontal grid resolution", default=40)
parser.add_argument("--i_dir", dest="input_dir",
                    help="input directory", default=abspath(script_directory))
parser.add_argument("--o_dir", dest="output_dir",
                    help="output directory", default='output')
parser.add_argument("--o_size", dest="osize",
                    choices=['small', 'medium', 'big', 'big_2d', 'custom'],
                    help="output size type", default='custom')
parser.add_argument("-s", "--system", dest="system",
                    choices=list_systems(),
                    help="computer system to use.", default='debug')
parser.add_argument("--spatial_ts", dest="spatial_ts",
                    choices=['default', 'ch', 'none'],
                    help="set of spatial time series variables to write", default='default')
parser.add_argument("--hydrology", dest="hydrology",
                    choices=['null', 'diffuse', 'routing'],
                    help="Basal hydrology model.", default='diffuse')
parser.add_argument("--stress_balance", dest="stress_balance",
                    choices=['sia', 'ssa+sia', 'ssa'],
                    help="stress balance solver", default='ssa+sia')
parser.add_argument("--vertical_velocity_approximation", dest="vertical_velocity_approximation",
                    choices=['centered', 'upstream'],
                    help="How to approximate vertical velocities", default='upstream')
parser.add_argument("--start_year", dest="start_year", type=int,
                    help="Simulation start year", default=0)
parser.add_argument("--duration", dest="duration", type=int,
                    help="Years to simulate", default=100)
parser.add_argument("--step", dest="step", type=int,
                    help="Step in years for restarting", default=100)
parser.add_argument("-e", "--ensemble_file", dest="ensemble_file",
                    help="File that has all combinations for ensemble study", default='ensemble.csv')
parser.add_argument("-b", "--bootstrap", dest="bootstrap", action="store_true",
                    help="Use bootstrapping", default=False)
parser.add_argument("--climate", dest="climate",
                    choices=['init', 'elev+ftt', 'warming'],
                    help="Climate forcing", default='init')
parser.add_argument("--cryohydrologic_warming", dest="cryohydrologic_warming", action='store_true',
                    help="Turn on cryohydrologic warming", default=False)


options = parser.parse_args()

nn = options.n
input_dir = abspath(options.input_dir)
output_dir = abspath(options.output_dir)

oformat = options.oformat
osize = options.osize
queue = options.queue
walltime = options.walltime
system = options.system

spatial_ts = options.spatial_ts

climate = options.climate
cryohydrologic_warming = options.cryohydrologic_warming
vertical_velocity_approximation = options.vertical_velocity_approximation

exstep = options.exstep
grid = options.grid
hydrology = options.hydrology
stress_balance = options.stress_balance
bootstrap = options.bootstrap

ensemble_file = options.ensemble_file

domain = 'storglaciaren'
pism_exec = generate_domain(domain)

pism_dataname = '$input_dir/pism_storglaciaren_3d.nc'

if options.restart_file is None:
    input_file = pism_dataname
else:
    input_file = options.restart_file

climate_file = pism_dataname

regridvars = 'litho_temp,enthalpy,age,tillwat,bmelt,ice_area_specific_volume,thk'

dirs = {"output": "$output_dir"}
for d in ["performance", "state", "scalar", "spatial", "snap", "jobs", "basins"]:
    dirs[d] = "$output_dir/{dir}".format(dir=d)

if spatial_ts == 'none':
    del dirs["spatial"]

# use the actual path of the run scripts directory (we need it now and
# not during the simulation)
scripts_dir = join(output_dir, "run_scripts")
if not os.path.isdir(scripts_dir):
    os.makedirs(scripts_dir)

# generate the config file *after* creating the output directory
pism_config = 'psg_config'
pism_config_nc = join(output_dir, pism_config + ".nc")

cmd = "ncgen -o {output} {input_dir}/{config}.cdl".format(output=pism_config_nc,
                                                          input_dir=input_dir,
                                                          config=pism_config)
sub.call(shlex.split(cmd))
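
# ncgen (from the NetCDF tools) compiles the CDL description above into a
# NetCDF configuration file; that file is handed to PISM further down via
# the -config_override option.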

# these Bash commands are added to the beginning of the run scripts
run_header = """# stop if a variable is not defined
set -u
# stop on errors
set -e

# path to the config file
config="{config}"
# path to the input directory (input data sets are contained in this directory)
input_dir="{input_dir}"
# output directory
output_dir="{output_dir}"

# create required output directories
for each in {dirs};
do
  mkdir -p $each
done

""".format(input_dir=input_dir,
           output_dir=output_dir,
           config=pism_config_nc,
           dirs=" ".join(dirs.values()))

# ########################################################
# set up model initialization
# ########################################################

ssa_n = 3.0
ssa_e = 1.0
tefo = 0.020
phi_min = 5.0
phi_max = 40.
topg_min = -700
topg_max = 700

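# Each row of the ensemble file is one parameter combination, in the column
# order unpacked at the top of the main loop below:
#   run_id, sia_e, sia_n, ssa_e, ssa_n, ppq, uth, phi_min, phi_max, topg_min, topg_max
# e.g. (values purely illustrative):
#   0, 1.25, 3.0, 1.0, 3.0, 0.6, 100, 5.0, 40.0, -700, 700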
try:
    combinations = np.loadtxt(ensemble_file, delimiter=',', skiprows=1)
except (OSError, ValueError):
    try:
        combinations = np.genfromtxt(ensemble_file, dtype=None, delimiter=',', skip_header=1, encoding=None)
    except TypeError:
        # older numpy without the "encoding" keyword
        combinations = np.genfromtxt(ensemble_file, dtype=None, delimiter=',', skip_header=1)

tsstep = 'yearly'

scripts = []
scripts_combined = []

simulation_start_year = options.start_year
simulation_end_year = options.start_year + options.duration
restart_step = options.step

if restart_step > (simulation_end_year - simulation_start_year):
    print('Error:')
    print('restart_step > (simulation_end_year - simulation_start_year): {} > {}'.format(
        restart_step, simulation_end_year - simulation_start_year))
    print('Try again')
    sys.exit(1)

batch_header, batch_system = make_batch_header(system, nn, walltime, queue)
print(batch_header, batch_system)
for n, combination in enumerate(combinations):

    run_id, sia_e, sia_n, ssa_e, ssa_n, ppq, uth, phi_min, phi_max, topg_min, topg_max = combination

    # till friction angle as a function of bed elevation, passed to -topg_to_phi
    ttphi = '{},{},{},{}'.format(phi_min, phi_max, topg_min, topg_max)

    name_options = OrderedDict()
    name_options['sb'] = stress_balance
    name_options['climate'] = climate
    try:
        name_options['id'] = '{:03d}'.format(int(run_id))
    except (TypeError, ValueError):
        name_options['id'] = '{}'.format(run_id)

    full_exp_name = '_'.join(['_'.join([k, str(v)]) for k, v in name_options.items()])
    full_outfile = 'g{grid}m_{experiment}.nc'.format(grid=grid, experiment=full_exp_name)

    # All runs in one script file for coarse grids that fit into max walltime
    script_combined = join(scripts_dir, 'sg_g{}m_{}_j.sh'.format(grid, full_exp_name))
    with open(script_combined, 'w') as f_combined:

        outfiles = []
        job_no = 0
        for start in range(simulation_start_year, simulation_end_year, restart_step):
            job_no += 1

            end = start + restart_step

            experiment = '_'.join(['_'.join(['_'.join([k, str(v)])
                                             for k, v in name_options.items()]),
                                   '{}'.format(start), '{}'.format(end)])

            script = join(scripts_dir, 'sg_g{}m_{}.sh'.format(grid, experiment))
            scripts.append(script)

            # remove a stale script of the same name, if any
            for filename in [script]:
                try:
                    os.remove(filename)
                except OSError:
                    pass

            if start == simulation_start_year:
                f_combined.write(batch_header)
                f_combined.write(run_header)

            with open(script, 'w') as f:

                f.write(batch_header)
                f.write(run_header)

                outfile = '{domain}_g{grid}m_{experiment}.nc'.format(
                    domain=domain.lower(), grid=grid, experiment=experiment)

                pism = generate_prefix_str(pism_exec)

                general_params_dict = {
                    'ys': start,
                    'ye': end,
                    'calendar': '365_day',
                    'climate_forcing_buffer_size': 365,
                    'o': join(dirs["state"], outfile),
                    'o_format': oformat,
                    'config_override': "$config"
                }

                if start == simulation_start_year and bootstrap:
                    general_params_dict['bootstrap'] = ''
                    general_params_dict['i'] = pism_dataname
                elif start == simulation_start_year:
                    general_params_dict['bootstrap'] = ''
                    general_params_dict['i'] = pism_dataname
                    general_params_dict['regrid_file'] = input_file
                    general_params_dict['regrid_vars'] = regridvars
                else:
                    general_params_dict['i'] = regridfile

                if osize != 'custom':
                    general_params_dict['o_size'] = osize
                else:
                    general_params_dict['output.sizes.medium'] = 'sftgif,velsurf_mag'

                if start == simulation_start_year:
                    grid_params_dict = generate_grid_description(grid, domain)
                else:
                    grid_params_dict = generate_grid_description(grid, domain, restart=True)

                sb_params_dict = {'sia_e': sia_e,
                                  'pseudo_plastic_q': ppq,
                                  'pseudo_plastic_threshold': uth,
                                  'till_effective_fraction_overburden': tefo,
                                  'vertical_velocity_approximation': vertical_velocity_approximation}

                if start == simulation_start_year:
                    sb_params_dict['topg_to_phi'] = ttphi

                stress_balance_params_dict = generate_stress_balance(stress_balance, sb_params_dict)

                climate_params_dict = generate_climate(
                    climate, **{'energy.ch_warming.enabled': cryohydrologic_warming})

                hydro_params_dict = generate_hydrology(hydrology)

                scalar_ts_dict = generate_scalar_ts(outfile, tsstep,
                                                    start=simulation_start_year,
                                                    end=simulation_end_year,
                                                    odir=dirs["scalar"])

                all_params_dict = merge_dicts(general_params_dict,
                                              grid_params_dict,
                                              stress_balance_params_dict,
                                              climate_params_dict,
                                              hydro_params_dict,
                                              scalar_ts_dict)

                if spatial_ts != 'none':
                    if spatial_ts == 'default':
                        exvars = default_spatial_ts_vars()
                    else:
                        exvars = ch_spatial_ts_vars()
                    spatial_ts_dict = generate_spatial_ts(outfile, exvars, exstep, odir=dirs["spatial"], split=False)

                    all_params_dict = merge_dicts(all_params_dict,
                                                  spatial_ts_dict)

                all_params = ' \\\n '.join(["-{} {}".format(k, v) for k, v in all_params_dict.items()])

                if system == 'debug':
                    redirect = ' 2>&1 | tee {jobs}/job_{job_no}'
                else:
                    redirect = ' > {jobs}/job_{job_no}.${job_id} 2>&1'

                template = "{mpido} {pism} {params}" + redirect

                context = merge_dicts(batch_system,
                                      dirs,
                                      {"job_no": job_no, "pism": pism, "params": all_params})
                cmd = template.format(**context)

                f.write(cmd)
                f.write('\n')
                f.write(batch_system.get("footer", ""))

                f_combined.write(cmd)
                f_combined.write('\n\n')

            regridfile = join(dirs["state"], outfile)
            outfiles.append(outfile)

        f_combined.write(batch_system.get("footer", ""))

    scripts_combined.append(script_combined)


scripts = uniquify_list(scripts)
scripts_combined = uniquify_list(scripts_combined)
print('\n'.join(scripts))
print('\nwritten\n')
print('\n'.join(scripts_combined))
print('\nwritten\n')
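
For reference, a minimal sketch of how the script might be invoked on the debug system; the ensemble file and output directory names are illustrative, and every other option falls back to the parser defaults listed above:

    python3 tpsg_3d_ch.py -s debug -q debug -n 4 -g 40 \
        -e ensemble.csv --o_dir output --duration 100 --step 100

This writes the per-segment and combined job scripts under output/run_scripts and prints their paths.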