@@ -22,7 +22,6 @@ class ZarrModelGrid(object):
        frequency (str): spacing between model time steps.
        valid_dates: DatetimeIndex of all model timesteps
        forecast_hours: array of all hours in the forecast
-        file_objects (list): List of the file objects for each model time step
    """
    def __init__(self,
                 path,
@@ -37,10 +36,7 @@ def __init__(self,
        self.start_date = pd.to_datetime(start_date)
        self.end_date = pd.to_datetime(end_date)
        self.frequency = frequency
-        self.valid_dates = date_range(start=self.start_date,
-                                      end=self.end_date,
-                                      freq=self.frequency)
-        print(self.run_date)
+        self.valid_dates = date_range(start=self.start_date, end=self.end_date, freq=self.frequency)
        self.forecast_hours = (self.valid_dates - self.run_date).astype("timedelta64[h]").astype(int)

    def load_data(self):
@@ -49,21 +45,22 @@ def load_data(self):
        level = self.variable.split('-')[1]
        self.variable = self.variable.split('-')[0]
        fs = s3fs.S3FileSystem(anon=True)
-        files = []
        run_date_str = self.run_date.strftime("%Y%m%d")
-        forecast_hour = self.run_date.strftime("%H")
-        path = join(self.path, run_date_str, f'{run_date_str}_{forecast_hour}z_fcst.zarr', level, self.variable, level)
+        run_hour = self.run_date.strftime("%H")
+        path = join(self.path, run_date_str, f'{run_date_str}_{run_hour}z_fcst.zarr', level, self.variable, level)
        f = s3fs.S3Map(root=path, s3=fs, check=False)
-        files.append(f)
-        ds = xr.open_mfdataset(files, engine='zarr', parallel=True).load()
+        ds = xr.open_mfdataset([f], engine='zarr', parallel=True).load()

-
-        if self.run_date not in self.valid_dates:
-            array = ds[self.variable].values[self.forecast_hours[0] - 1:self.forecast_hours[-1]].astype('float32')
-        elif self.run_date in self.valid_dates:
+        if self.run_date in self.valid_dates:
            arr = ds[self.variable].values[self.forecast_hours[0]:self.forecast_hours[-1] + 1].astype('float32')
-            dummy_forecast_hour_00 = np.zeros((1, arr.shape[1], arr.shape[2]))
-            array = np.concatenate([dummy_forecast_hour_00, arr])[self.forecast_hours[0]:self.forecast_hours[-1] + 1, :, :]
+            forecast_hour_00_path = join(self.path, run_date_str, f'{run_date_str}_{run_hour}z_anl.zarr', level,
+                                         self.variable.replace('1hr_', ''), level)
+            fh_0_file = s3fs.S3Map(root=forecast_hour_00_path, s3=fs, check=False)
+            fh_0_ds = xr.open_mfdataset([fh_0_file], engine='zarr', parallel=True).expand_dims('time')
+            fh_0_arr = fh_0_ds[self.variable.replace('1hr_', '')].values
+            array = np.concatenate([fh_0_arr, arr])[self.forecast_hours[0]:self.forecast_hours[-1] + 1, :, :]
+        else:
+            array = ds[self.variable].values[self.forecast_hours[0] - 1:self.forecast_hours[-1]].astype('float32')

        if hasattr(ds[self.variable], 'units'):
            units = ds[self.variable].attrs['units']
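
For context on the new branch in load_data: the _fcst.zarr store begins at forecast hour 1, so when the run date itself falls inside valid_dates the forecast-hour-0 field is read from the companion _anl.zarr store and prepended before slicing by forecast hour. The standalone sketch below mirrors only that indexing with dummy arrays; the dates, grid shape, and the fcst/anl stand-ins are illustrative assumptions, not values from the repository.

import numpy as np
import pandas as pd

# Hypothetical request: a 00Z run, valid from the run time out to 6 hours.
run_date = pd.Timestamp("2021-05-04 00:00")
valid_dates = pd.date_range(start="2021-05-04 00:00", end="2021-05-04 06:00", freq="1h")
forecast_hours = ((valid_dates - run_date) // pd.Timedelta(hours=1)).astype(int)  # [0, 1, ..., 6]

# Stand-ins for the arrays the zarr stores would provide:
# the forecast store holds hours 1..48, the analysis store holds hour 0 only.
ny, nx = 4, 5
fcst = np.random.rand(48, ny, nx).astype("float32")  # plays the role of ds[variable].values
anl = np.random.rand(1, ny, nx).astype("float32")    # plays the role of fh_0_arr

if run_date in valid_dates:
    # Hour 0 is requested: prepend the analysis field, then slice by forecast hour.
    arr = fcst[forecast_hours[0]:forecast_hours[-1] + 1]
    array = np.concatenate([anl, arr])[forecast_hours[0]:forecast_hours[-1] + 1]
else:
    # Hour 0 is not requested: forecast hour h sits at index h - 1 of the forecast store.
    array = fcst[forecast_hours[0] - 1:forecast_hours[-1]]

assert array.shape == (len(valid_dates), ny, nx)

With the analysis store supplying hour 0, the np.zeros dummy field from the old branch is no longer needed, since that first time step now carries real values.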