1+ #!/usr/bin/env python
2+ # encoding: utf-8
3+ r"""
4+ Routines for reading and writing a HDF5 output file
5+
6+ This module reads and writes hdf5 files via the following module:
7+ h5py - http://code.google.com/p/h5py/
8+
9+ To install h5py, you must also install the hdf5 library from the website:
10+ http://www.hdfgroup.org/HDF5/release/obtain5.html
11+ """
12+
13+ from mpi4py import MPI
14+ import os
15+ import logging
16+
17+ from clawpack .petclaw import geometry
18+ from clawpack import petclaw
19+
20+ logger = logging .getLogger ('pyclaw.io' )
21+
try:
    import h5py
except ImportError:
    # h5py (and the underlying HDF5 library) is a hard requirement for this
    # module; fail loudly at import time with a pointer to the module docs.
    logging.critical("Could not import h5py!")
    error_msg = ("Could not import h5py, please install h5py. See "
                 "the doc_string for more information.")
    raise Exception(error_msg)
30+
def write(solution, frame, path, file_prefix='claw', write_aux=False,
          options=None, write_p=False):
    r"""
    Write out a Solution to a HDF5 file.

    :Input:
     - *solution* - (:class:`~pyclaw.solution.Solution`) Pyclaw solution
       object to input into
     - *frame* - (int) Frame number
     - *path* - (string) Root path
     - *file_prefix* - (string) Prefix for the file name. ``default = 'claw'``
     - *write_aux* - (bool) Boolean controlling whether the associated
       auxiliary array should be written out. ``default = False``
     - *options* - (dict or None) Optional argument dictionary, see
       `HDF5 Option Table`_. ``default = None`` (use all defaults)
     - *write_p* - (bool) If True, write ``state.p`` instead of ``state.q``.
       ``default = False``

    .. _`HDF5 Option Table`:

    +-----------------+------------------------------------------------------+
    | Key             | Value                                                |
    +=================+======================================================+
    | compression     | (None, string ["gzip" | "lzf" | "szip"] or int 0-9)  |
    |                 | Enable dataset compression. DEFLATE, LZF and (where  |
    |                 | available) SZIP are supported. An integer is         |
    |                 | interpreted as a GZIP level for backwards            |
    |                 | compatibility.                                       |
    +-----------------+------------------------------------------------------+
    |compression_opts | (None, or special value) Setting for compression     |
    |                 | filter; legal values for each filter type are:       |
    |                 |                                                      |
    |                 | - *gzip* - (int) 0-9                                 |
    |                 | - *lzf* - None allowed                               |
    |                 | - *szip* - (tuple) 2-tuple ('ec'|'nn', even integer  |
    |                 |   0-32)                                              |
    |                 |                                                      |
    |                 | See the filters module for a detailed description of |
    |                 | each of these filters.                               |
    +-----------------+------------------------------------------------------+
    | chunks          | (None, True or shape tuple) Store the dataset in     |
    |                 | chunked format. Automatically selected if any of the |
    |                 | other keyword options are given. If you don't provide|
    |                 | a shape tuple, the library will guess one for you.   |
    +-----------------+------------------------------------------------------+
    | shuffle         | (True/False) Enable/disable data shuffling, which can|
    |                 | improve compression performance. Automatically       |
    |                 | enabled when compression is used.                    |
    +-----------------+------------------------------------------------------+
    | fletcher32      | (True/False) Enable Fletcher32 error detection; may  |
    |                 | be used with or without compression.                 |
    +-----------------+------------------------------------------------------+
    """
    option_defaults = {'compression': None, 'compression_opts': None,
                       'chunks': None, 'shuffle': False, 'fletcher32': False}
    # Merge user-supplied options over the defaults on a COPY.  The previous
    # version used a shared mutable default argument and wrote the merged
    # values back into it, mutating the caller's dictionary and leaking
    # state between calls.
    opts = dict(option_defaults)
    if options:
        opts.update(options)

    filename = os.path.join(path, '%s%s.hdf' %
                            (file_prefix, str(frame).zfill(4)))

    if opts['compression'] is not None:
        # Collective MPI-IO writes cannot be combined with HDF5 filters.
        err_msg = "Compression (filters) are not available for parallel h5py yet."
        logging.critical(err_msg)
        raise Exception(err_msg)

    with h5py.File(filename, 'w', driver='mpio', comm=MPI.COMM_WORLD) as f:
        # For each patch, write out attributes
        for state in solution.states:
            patch = state.patch
            # Create group for this patch
            subgroup = f.create_group('patch%s' % patch.patch_index)

            # General patch properties
            subgroup.attrs['t'] = state.t
            subgroup.attrs['num_eqn'] = state.num_eqn
            subgroup.attrs['num_aux'] = state.num_aux
            for attr in ['num_ghost', 'patch_index', 'level']:
                if hasattr(patch, attr):
                    if getattr(patch, attr) is not None:
                        subgroup.attrs[attr] = getattr(patch, attr)

            # Add the dimension names as an attribute
            subgroup.attrs['dimensions'] = patch.get_dim_attribute('name')
            # Dimension properties, stored as "<dim name>.<attr>" attributes
            for dim in patch.dimensions:
                for attr in ['num_cells', 'lower', 'delta', 'upper', 'units']:
                    if hasattr(dim, attr):
                        if getattr(dim, attr) is not None:
                            attr_name = '%s.%s' % (dim.name, attr)
                            subgroup.attrs[attr_name] = getattr(dim, attr)

            q = state.p if write_p else state.q
            # Local index ranges owned by this MPI rank (from the PETSc DA)
            r = patch._da.getRanges()
            global_size = [q.shape[0]] + list(patch.num_cells_global)
            dset = subgroup.create_dataset('q', global_size,
                                           dtype='float', **opts)
            to_hdf5_dataset(q, dset, len(patch.dimensions), r)

            if write_aux and state.num_aux > 0:
                r = patch._da.getRanges()
                global_size = [state.num_aux] + list(patch.num_cells_global)
                dset = subgroup.create_dataset('aux', global_size,
                                               dtype='float', **opts)
                to_hdf5_dataset(state.aux, dset, len(patch.dimensions), r)
140+
141+
def read(solution, frame, path='./', file_prefix='claw', read_aux=True,
         options=None):
    r"""
    Read in a HDF5 file into a Solution.

    :Input:
     - *solution* - (:class:`~pyclaw.solution.Solution`) Pyclaw object to be
       output
     - *frame* - (int) Frame number
     - *path* - (string) Root path
     - *file_prefix* - (string) Prefix for the file name. ``default = 'claw'``
     - *read_aux* - (bool) Whether to read in the associated auxiliary array
       if it is present in the file. ``default = True``
     - *options* - (dict) Optional argument dictionary, not used for reading.
    """
    filename = os.path.join(path, '%s%s.hdf' %
                            (file_prefix, str(frame).zfill(4)))
    patches = []

    with h5py.File(filename, 'r', driver='mpio', comm=MPI.COMM_WORLD) as f:
        # One HDF5 group per patch, as laid out by write() above.
        for patch in f.values():
            # Construct each dimension
            dimensions = []
            dim_names = patch.attrs['dimensions']
            for dim_name in dim_names:
                dim = geometry.Dimension(
                    patch.attrs["%s.lower" % dim_name],
                    patch.attrs["%s.upper" % dim_name],
                    patch.attrs["%s.num_cells" % dim_name],
                    name=dim_name)
                # Optional attributes (only set when present and truthy)
                for attr in ['units']:
                    attr_name = "%s.%s" % (dim_name, attr)
                    if patch.attrs.get(attr_name, None):
                        setattr(dim, attr, patch.attrs[attr_name])
                dimensions.append(dim)

            pyclaw_patch = petclaw.Patch(dimensions)

            # Fetch general patch properties
            for attr in ['t', 'num_eqn', 'patch_index', 'level']:
                setattr(pyclaw_patch, attr, patch.attrs[attr])

            state = petclaw.state.State(pyclaw_patch,
                                        patch.attrs['num_eqn'],
                                        patch.attrs['num_aux'])
            state.t = patch.attrs['t']

            global_size = [state.q.shape[0]] + \
                list(pyclaw_patch.num_cells_global)
            # Local index ranges owned by this MPI rank (from the PETSc DA)
            r = pyclaw_patch._da.getRanges()

            dset = patch['q'][:].reshape(global_size)
            state.q = from_hdf5_dataset(dset, len(pyclaw_patch.dimensions),
                                        r, state.q.shape)

            # Read in aux if applicable
            if read_aux and patch.get('aux', None) is not None:
                dset = patch['aux'][:]
                state.aux = from_hdf5_dataset(dset,
                                              len(pyclaw_patch.dimensions),
                                              r, state.aux.shape)

            solution.states.append(state)
            patches.append(pyclaw_patch)

    solution.domain = geometry.Domain(patches)
206+
def to_hdf5_dataset(arr, dset, ndim, ranges):
    """Write the local array *arr* into its slot of the global dataset.

    :Input:
     - *arr* - local (per-rank) array of shape ``(num_eqn, local cells...)``
     - *dset* - writable dataset (or array) holding the global data, indexed
       as ``(num_eqn, global cells...)``
     - *ndim* - (int) number of spatial dimensions to slice
     - *ranges* - sequence of ``(start, stop)`` pairs giving this rank's
       owned index range along each spatial dimension

    The first axis (equation index) is always taken whole; the following
    ``ndim`` axes are restricted to the given ranges.  Unlike the previous
    hard-coded 1/2/3-D branches, this works for any dimensionality (and no
    longer silently does nothing for other values of ``ndim``).
    """
    index = (slice(None),) + tuple(slice(start, stop)
                                   for start, stop in ranges[:ndim])
    dset[index] = arr
214+
def from_hdf5_dataset(dset, ndim, ranges, shape):
    """Extract this rank's local portion of a global dataset.

    :Input:
     - *dset* - dataset (or array) holding the global data, indexed as
       ``(num_eqn, global cells...)``
     - *ndim* - (int) number of spatial dimensions to slice
     - *ranges* - sequence of ``(start, stop)`` pairs giving this rank's
       owned index range along each spatial dimension
     - *shape* - target shape of the local array

    :Output:
     - local array reshaped to *shape* in Fortran (column-major) order, the
       layout PETSc/petclaw state arrays use.

    The first axis (equation index) is always taken whole; the following
    ``ndim`` axes are restricted to the given ranges.  Unlike the previous
    hard-coded 1/2/3-D branches, this works for any dimensionality (and no
    longer silently returns ``None`` for other values of ``ndim``).
    """
    index = (slice(None),) + tuple(slice(start, stop)
                                   for start, stop in ranges[:ndim])
    return dset[index].reshape(shape, order='F')