 import psutil
 import requests
 import xarray as xr
+import numpy as np
 from pystac.item_collection import ItemCollection
 from pystac_client import Client
 from odc import stac
@@ -685,10 +686,6 @@ def datacube(
                 "No cross calibration coefficient available for the specified collections."
             )
 
-        groupby_date_sensor_cube = groupby_date
-        if mask_with and groupby_date:
-            groupby_date_sensor_cube = None
-
         xr_datacube = datacube(
             items,
             intersects=intersects,
@@ -697,7 +694,7 @@ def datacube(
             common_band_names=common_band_names,
             cross_calibration_items=xcal_items,
             properties=properties,
-            groupby_date=groupby_date_sensor_cube,
+            groupby_date=None if mask_with == "ag_cloud_mask" else groupby_date,
             **kwargs,
         )
         if mask_with:
@@ -750,32 +747,39 @@ def datacube(
             Mask = mask.Mask(xr_datacube, intersects=intersects, bbox=bbox)
             xr_datacube = getattr(Mask, mask_with)(**mask_kwargs)
 
-            if clear_cover:
-                xr_datacube = mask.filter_clear_cover(xr_datacube, clear_cover)
-        if groupby_date and mask_with:
-            grouped_coords = []
-            # for coords using only time dimensions like clear_pixels, keeping the max
-            for coord in xr_datacube.coords:
-                if coord in ("x", "y", "time"):
-                    continue
-                if (
-                    len(xr_datacube[coord].dims) == 1
-                    and xr_datacube[coord].dims[0] == "time"
-                ):
-                    grouped_coords.append(
-                        xr_datacube[coord]
-                        .groupby("time.date", squeeze=True)
-                        .max()
-                        .rename(dict(date="time"))
+        if groupby_date:
+            xr_datacube = xr_datacube.groupby("time.date", restore_coord_dims=True)
+            xr_datacube = getattr(xr_datacube, groupby_date)().rename(
+                dict(date="time")
+            )
+            xr_datacube["time"] = xr_datacube.time.astype("<M8[ns]")
+
+        if clear_cover or mask_statistics:
+            first_var = xr_datacube[list(xr_datacube.data_vars)[0]]
+            xy = first_var.isel(time=0).size
+
+            null_pixels = (first_var.isnull().sum(dim=("x", "y"))).values
+            n_pixels_as_labels = xy - null_pixels
+            # n_pixels_as_labels = xr_datacube.attrs["usable_pixels"] - n_pixels_as_labels
+
+            xr_datacube = xr_datacube.assign_coords(
+                {"clear_pixels": ("time", n_pixels_as_labels)}
                     )
 
-        xr_datacube = xr_datacube.groupby("time.date", restore_coord_dims=True)
-        xr_datacube = getattr(xr_datacube, groupby_date)().rename(dict(date="time"))
-        for grouped_coord in grouped_coords:
-            xr_datacube = xr_datacube.assign_coords(
-                {grouped_coord.name: grouped_coord}
-            )
-        xr_datacube["time"] = xr_datacube.time.astype("<M8[ns]")
+            xr_datacube = xr_datacube.assign_coords(
+                {
+                    "clear_percent": (
+                        "time",
+                        np.multiply(
+                            n_pixels_as_labels
+                            / xr_datacube.attrs["usable_pixels"],
+                            100,
+                        ).astype(np.int8),
+                    )
+                }
+            )
+            if clear_cover:
+                xr_datacube = mask.filter_clear_cover(xr_datacube, clear_cover)
 
         return xr_datacube
 
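For reference, a minimal sketch of what the reworked statistics branch computes. It builds a tiny synthetic cube (the name `cube`, the 4x4 grid, and the `usable_pixels` attribute are illustrative assumptions, mirroring the attribute the patch reads), derives the new per-date `clear_pixels` and `clear_percent` coordinates the same way the added lines do, and then drops dates below a clear-cover threshold, which is the role `mask.filter_clear_cover` plays in the patched code:

import numpy as np
import pandas as pd
import xarray as xr

# Stand-in for a masked datacube: 3 dates of one band, NaN = cloud-masked pixel.
times = pd.date_range("2023-06-01", periods=3)
data = np.random.rand(3, 4, 4)
data[0, :3, :] = np.nan  # first date: 12 of 16 pixels masked
cube = xr.Dataset(
    {"red": (("time", "y", "x"), data)},
    coords={"time": times, "y": np.arange(4), "x": np.arange(4)},
    attrs={"usable_pixels": 16},  # assumed attribute, as referenced in the patch
)

# Same computation as the new `clear_cover or mask_statistics` branch:
# count non-NaN pixels per date and express them as a percentage of usable pixels.
first_var = cube[list(cube.data_vars)[0]]
xy = first_var.isel(time=0).size
clear_pixels = xy - first_var.isnull().sum(dim=("x", "y")).values
cube = cube.assign_coords(
    clear_pixels=("time", clear_pixels),
    clear_percent=(
        "time",
        (clear_pixels / cube.attrs["usable_pixels"] * 100).astype(np.int8),
    ),
)

print(cube.clear_percent.values)  # [ 25 100 100]

# Equivalent of filter_clear_cover(cube, 50): keep only dates with >= 50 % clear pixels.
keep = np.flatnonzero(cube.clear_percent.values >= 50)
cube = cube.isel(time=keep)
print(cube.sizes["time"])  # 2 dates remain

The pixel values are random, but the NaN pattern is fixed, so the printed statistics are deterministic.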