@@ -136,6 +136,32 @@ def clean_fire_slug(name: str | None) -> str:
136136 return cleaned
137137
138138
def _format_results_filename(
    identifier_source: Any | None, date_source: Any | None, fallback_slug: str
) -> tuple[str, str]:
    """Return a file-friendly ``(fire_id, filename)`` tuple for DEA outputs.

    Parameters
    ----------
    identifier_source:
        Raw fire identifier (any type); cleaned via ``clean_fire_slug``.
        ``None`` or ``""`` means "missing".
    date_source:
        Raw processed-date value, normalised via ``process_date``.
    fallback_slug:
        Identifier used when ``identifier_source`` is missing or cleans
        to an empty string.

    Returns
    -------
    tuple[str, str]
        ``(fire_id_for_save, "DEA_burn_severity_<fire_id_for_save>.json")``.
    """
    if identifier_source in (None, ""):
        identifier = ""
    else:
        identifier = clean_fire_slug(str(identifier_source).strip())
    if not identifier:
        identifier = fallback_slug

    date_value = process_date(date_source)
    if not date_value:
        # datetime.utcnow() is deprecated (3.12+) and naive; use an
        # aware UTC timestamp — same "%Y-%m-%d" result for the date.
        from datetime import timezone

        date_value = datetime.now(timezone.utc).strftime("%Y-%m-%d")

    fire_id_for_save = f"{identifier}_{date_value}"
    return fire_id_for_save, f"DEA_burn_severity_{fire_id_for_save}.json"
151+
152+
def _build_vector_filename(
    fire_series: pd.Series, attributes: dict[str, Any], fallback_slug: str
) -> tuple[str, str]:
    """Resolve identifier/date for a fire and build its DEA output filename.

    Values already present in *attributes* win; otherwise the per-fire
    series columns are consulted via ``_first_valid_value``. The final
    formatting (including the *fallback_slug* default) is delegated to
    ``_format_results_filename``.
    """
    fire_id = attributes.get("fire_id")
    if fire_id in (None, ""):
        fire_id = _first_valid_value(fire_series, FIRE_ID_FIELDS)

    date_processed = attributes.get("date_processed")
    if date_processed in (None, ""):
        date_processed = _first_valid_value(
            fire_series, ("date_processed", "date_proce")
        )

    return _format_results_filename(fire_id, date_processed, fallback_slug)
163+
164+
139165def _select_reference_band (dataset : xr .Dataset ) -> str :
140166 preferred = ("nbart_green" , "nbart_red" , "nbart_nir_1" , "nbart_blue" )
141167 for candidate in preferred :
@@ -680,6 +706,9 @@ def process_single_fire(
680706 aggregated ["fire_name" ] = fire_name_value or fire_slug
681707 aggregated ["ignition_date" ] = fire_date
682708 aggregated ["extinguish_date" ] = extinguish_date
709+ fire_id_for_save , vector_filename = _build_vector_filename (
710+ fire_series = fire_series , attributes = attributes , fallback_slug = fire_slug
711+ )
683712
684713 for key , value in attributes .items ():
685714 if key in {"fire_id" , "fire_name" , "ignition_date" , "extinguish_date" }:
@@ -688,10 +717,13 @@ def process_single_fire(
688717
689718 base_dir = out_dir if out_dir else config .output_dir
690719 os .makedirs (base_dir , exist_ok = True )
720+ results_dir = os .path .join (base_dir , "results" )
721+ os .makedirs (results_dir , exist_ok = True )
691722
692- out_vec = os .path .join (base_dir , f"burn_severity_polygons_{ fire_slug } .geojson" )
723+ # This matches the required DEA_burn_severity_<fire_id>_<date>.json naming.
724+ out_vec = os .path .join (results_dir , vector_filename )
693725 aggregated .to_file (out_vec , driver = "GeoJSON" )
694- print (f"Saved per-fire severity GeoJSON: { out_vec } " )
726+ print (f"Saved per-fire severity GeoJSON ( { fire_id_for_save } ) : { out_vec } " )
695727
696728 out_cog_preview = os .path .join (base_dir , f"s2_postfire_preview_{ fire_slug } .tif" )
697729 write_cog (post .isel (time = 0 ).to_array ().compute (), fname = out_cog_preview , overwrite = True )
@@ -771,9 +803,11 @@ def main(config: RuntimeConfig | None = None) -> None:
771803 fire_dir = os .path .join (runtime .output_dir , base_fire_slug )
772804 os .makedirs (fire_dir , exist_ok = True )
773805
774- final_vector_path = os . path . join (
775- fire_dir , f"burn_severity_polygons_ { base_fire_slug } .geojson"
806+ fire_id_for_save , vector_filename = _build_vector_filename (
807+ fire_series = fire_series , attributes = fire_attrs , fallback_slug = base_fire_slug
776808 )
809+ results_dir = os .path .join (fire_dir , "results" )
810+ final_vector_path = os .path .join (results_dir , vector_filename )
777811 log_path = os .path .join (fire_dir , f"{ base_fire_slug } _processing.log" )
778812
779813 if not os .path .exists (log_path ):
@@ -790,17 +824,20 @@ def main(config: RuntimeConfig | None = None) -> None:
790824 skip_due_to_output = False
791825 if not runtime .force_rebuild :
792826 if _is_valid_geojson (final_vector_path ):
793- print (f"[Fire '{ base_fire_name } '] Local output exists & valid. Skipping." )
827+ print (
828+ f"[Fire '{ base_fire_name } ' ({ fire_id_for_save } )] Local output exists & valid. Skipping."
829+ )
794830 fire_skip += 1
795831 skip_due_to_output = True
796832 elif upload_to_s3 and s3_fs is not None :
797833 bucket , prefix = _parse_s3_uri (s3_prefix )
798834 remote_key = (
799- f"{ prefix } /{ base_fire_slug } /"
800- f"burn_severity_polygons_{ base_fire_slug } .geojson"
835+ f"{ prefix } /{ base_fire_slug } /results/{ vector_filename } "
801836 )
802837 if _s3_key_exists_and_nonempty (s3_fs , bucket , remote_key ):
803- print (f"[Fire '{ base_fire_name } '] Output exists in S3. Skipping." )
838+ print (
839+ f"[Fire '{ base_fire_name } ' ({ fire_id_for_save } )] Output exists in S3. Skipping."
840+ )
804841 fire_skip += 1
805842 skip_due_to_output = True
806843
0 commit comments