@@ -158,12 +158,19 @@ def generate_golden_files(
     container.ingest.properties(
         lulc_csv=str(lulc_csv) if lulc_csv.exists() else None,
         soils_csv=str(ssurgo_csv) if ssurgo_csv.exists() else None,
-        irrigation_csv=str(irr_csv) if irr_csv.exists() else None,
+        irr_csv=str(irr_csv) if irr_csv.exists() else None,
         uid_column="site_id",
         lulc_column="modis_lc",
         extra_lulc_column="glc10_lc",
     )
 
+    # Compute merged NDVI (required before dynamics)
+    logger.info("Computing merged NDVI...")
+    container.compute.merged_ndvi(
+        masks=tuple(masks_to_ingest),
+        instruments=(instrument,),
+    )
+
     # Compute dynamics with all ingested masks
     logger.info("Computing dynamics...")
     container.compute.dynamics(
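
The "(required before dynamics)" comment in the hunk above encodes an ordering constraint: the merged-NDVI step must run between property ingest and the dynamics computation. This diff does not show how swimrs enforces that order; a common pattern for such a precondition looks like the following sketch (class and attribute names are hypothetical, not the container's API):

    class ComputeNamespace:
        """Hypothetical stand-in for container.compute."""

        def __init__(self):
            self._ndvi_merged = False

        def merged_ndvi(self, masks, instruments):
            # ... merge per-instrument NDVI into one series per field ...
            self._ndvi_merged = True

        def dynamics(self, **kwargs):
            # Fail fast if the prerequisite step was skipped.
            if not self._ndvi_merged:
                raise RuntimeError("compute.merged_ndvi() must run before dynamics()")
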
@@ -199,6 +206,9 @@ def generate_golden_files(
     irr_data = {}
     for i, uid in enumerate(container.field_uids):
         val = irr_arr[i]
+        # Handle zarr v3 ndarray returns
+        if hasattr(val, "item"):
+            val = val.item()
         if val:
             irr_data[uid] = json.loads(val)
         else:
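
The three added lines above guard against a zarr-python v3 behavior change: scalar indexing into a string array can return a 0-d NumPy array where v2 returned a plain Python string, and json.loads needs the latter. A standalone sketch of the unwrap pattern (the sample value is invented for illustration):

    import numpy as np

    val = np.array('{"2021": true}')  # 0-d ndarray, as a zarr v3 read may return
    if hasattr(val, "item"):
        val = val.item()              # unwrap to the underlying Python scalar
    assert val == '{"2021": true}'    # plain str again, safe for json.loads

A plain str has no .item(), so values that were never wrapped pass through the guard unchanged.
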
@@ -213,80 +223,19 @@ def generate_golden_files(
     gwsub_data = {}
     for i, uid in enumerate(container.field_uids):
         val = gwsub_arr[i]
+        # Handle zarr v3 ndarray returns
+        if hasattr(val, "item"):
+            val = val.item()
         if val:
             gwsub_data[uid] = json.loads(val)
         else:
             gwsub_data[uid] = None
     golden_outputs["gwsub_data"] = gwsub_data
     logger.info(f"Extracted gwsub_data for {len(gwsub_data)} fields")
 
-    # 5. Export prepped_input.json
-    prepped_path = Path(tmp_dir) / "prepped_input.json"
-    logger.info("Exporting prepped_input.json...")
-    container.export.prepped_input_json(
-        output_path=str(prepped_path),
-        etf_model=etf_model,
-        masks=tuple(masks_to_ingest),
-    )
-
-    # Read and parse the prepped input
-    with open(prepped_path) as f:
-        # It's a JSONL file, so read first line as sample
-        first_line = f.readline()
-        if first_line:
-            prepped_sample = json.loads(first_line)
-            # Store just the structure and a subset for testing
-            prepped_summary = {
-                "field_count": len(container.field_uids),
-                "fields": container.field_uids,
-                "first_field_keys": list(prepped_sample.keys()) if prepped_sample else [],
-            }
-            golden_outputs["prepped_input_summary"] = prepped_summary
-
-    # Save full prepped input (copy the file)
-    import shutil
-
-    shutil.copy(prepped_path, output_dir / "prepped_input.json")
-
-    # 6. Generate spinup by running the model
-    logger.info("Generating spinup state by running model...")
-    try:
-        from swimrs.model.obs_field_cycle import field_day_loop
-        from swimrs.swim.config import ProjectConfig
-        from swimrs.swim.sampleplots import SamplePlots
-
-        # Create a minimal config for running the model
-        # We need to run with the prepped_input.json we just generated
-        config = ProjectConfig()
-
-        # Set minimal required attributes
-        config.prepped_input = str(prepped_path)
-        config.start_dt = datetime.strptime(start_date, "%Y-%m-%d")
-        config.end_dt = datetime.strptime(end_date, "%Y-%m-%d")
-        config.fields_shapefile = str(shapefile)
-        config.feature_id_col = uid_column
-        config.refet_type = "eto"
-        config.irrigation_threshold = 0.3
-        config.runoff_process = "cn"
-        config.mode_forecast = False
-        config.mode_calib = False
-
-        # Initialize plots and run model
-        plots = SamplePlots()
-        plots.initialize_plot_data(config)
-        output = field_day_loop(config, plots, debug_flag=False)
-
-        # Extract final state for each field
-        spinup_data = {}
-        for field_id, field_df in output.items():
-            spinup_data[field_id] = field_df.iloc[-1].to_dict()
-
-        golden_outputs["spinup"] = spinup_data
-        logger.info(f"Generated spinup for {len(spinup_data)} field(s)")
-
-    except Exception as e:
-        logger.warning(f"Failed to generate spinup: {e}")
-        logger.warning("Spinup file will not be generated")
+    # NOTE: prepped_input and spinup generation removed - those use legacy APIs
+    # The core golden files (ke_max, kc_max, irr_data, gwsub_data) are sufficient
+    # for regression testing the dynamics computation
 
     container.close()
 
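The replacement NOTE narrows the golden set to four outputs. As a sketch of the regression check those files support, where helper name, file layout, and tolerance are illustrative assumptions, not taken from this repo:

    import json
    from pathlib import Path

    import numpy as np

    def assert_matches_golden(golden_path: Path, fresh: dict, rtol: float = 1e-6):
        # Hypothetical helper: compare freshly computed dynamics outputs
        # against the stored golden values.
        golden = json.loads(golden_path.read_text())
        for key in ("ke_max", "kc_max"):        # numeric per-field values
            np.testing.assert_allclose(fresh[key], golden[key], rtol=rtol)
        for key in ("irr_data", "gwsub_data"):  # per-field dicts, or None
            assert fresh[key] == golden[key]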