@@ -1,10 +1,8 @@
-import logging
-from datetime import datetime
 from collections import defaultdict
+import logging
 import pandas as pd
 import geopandas as gpd
 import numpy as np
-import pytz
 import xarray as xr
 from rasterio.enums import Resampling
 from shapely.geometry import box
@@ -14,6 +12,7 @@
 from .asset_mapper import AssetMapper
 import rioxarray
 from functools import wraps
+import json
 
 __all__ = ["GeometryManager", "rioxarray", "zonal_stats", "zonal_stats_numpy"]
 
|
@@ -118,6 +117,8 @@ def _cube_odc(
     metadata = {k: ("time", v.tolist()) for k, v in df.items()}
     # assign metadata as coords
     ds = ds.assign_coords(**metadata)
+    ds = ds.chunk(kwargs["chunks"])
+
     return ds
 
 
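The new `ds.chunk(kwargs["chunks"])` line re-chunks the dataset right after the metadata coordinates are attached. A minimal sketch of what that call does, assuming a dict-style chunks value is forwarded through `**kwargs` and that dask is installed (the example dataset and chunk sizes are made up):

```python
import numpy as np
import xarray as xr

# stand-in for the dataset built inside _cube_odc
ds = xr.Dataset({"red": (("time", "y", "x"), np.zeros((4, 256, 256)))})

kwargs = {"chunks": {"time": 1, "x": 128, "y": 128}}  # assumed caller-supplied value
ds = ds.chunk(kwargs["chunks"])  # wraps each variable in lazy dask arrays

print(ds.red.chunks)  # ((1, 1, 1, 1), (128, 128), (128, 128))
```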
|
@@ -249,6 +250,16 @@ def datacube(
 
     if isinstance(assets, dict):
        ds = ds.rename(assets)
+
+    for coord in ds.coords:
+        if ds.coords[coord].values.shape == ():
+            continue
+        if isinstance(ds.coords[coord].values[0], (list, dict)):
+            ds.coords[coord].values = [
+                json.dumps(ds.coords[coord].values[idx])
+                for idx in range(ds.coords[coord].size)
+            ]
+
     return ds
 
 
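The loop added to `datacube()` converts every non-scalar coordinate whose values are Python lists or dicts (typically per-item STAC metadata attached as coordinates) into JSON strings, presumably so the dataset can be serialised later, since object-valued coordinates cannot be written to formats such as NetCDF. A hedged sketch of the behaviour, using a made-up `stac_bbox` coordinate:

```python
import json
import numpy as np
import xarray as xr

ds = xr.Dataset(coords={"time": [0, 1]})

# hypothetical per-time metadata coordinate holding Python lists
bboxes = np.empty(2, dtype=object)
bboxes[0] = [0, 0, 10, 10]
bboxes[1] = [0, 0, 20, 20]
ds = ds.assign_coords(stac_bbox=("time", bboxes))

# same loop as the commit: skip scalar coords, JSON-encode list/dict values
for coord in ds.coords:
    if ds.coords[coord].values.shape == ():
        continue
    if isinstance(ds.coords[coord].values[0], (list, dict)):
        ds.coords[coord].values = [
            json.dumps(ds.coords[coord].values[idx])
            for idx in range(ds.coords[coord].size)
        ]

first = ds.stac_bbox.values[0]
print(first)              # [0, 0, 10, 10] -- now a JSON string, not a list
print(json.loads(first))  # round-trips back to the original Python list
```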
|
@@ -358,14 +369,9 @@ def rescale_assets_with_items(
             ds_scaled[asset] = []
             for scale in scales[asset].keys():
                 for offset in scales[asset][scale].keys():
-                    times = list(set(scales[asset][scale][offset]))
-                    if len(times) != len(scales[asset][scale][offset]):
-                        for time in times:
-                            d = ds[[asset]].loc[dict(time=time)] * scale + offset
-                            ds_scaled[asset].append(d)
-                    else:
-                        d = ds[[asset]].loc[dict(time=times)] * scale + offset
-                        ds_scaled[asset].append(d)
+                    times = np.in1d(ds.time, list(set(scales[asset][scale][offset])))
+                    asset_scaled = ds[[asset]].isel(time=times) * scale + offset
+                    ds_scaled[asset].append(asset_scaled)
         ds_ = []
         for k, v in ds_scaled.items():
             ds_k = []
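In `rescale_assets_with_items` the per-timestamp `.loc` loop is replaced by a single boolean mask: `np.in1d` flags every time step that shares a given (scale, offset) pair, and one `isel()` call rescales them all at once. A small illustration of that selection pattern with made-up times, scale and offset (`np.isin` is the modern spelling of `np.in1d`):

```python
import numpy as np
import xarray as xr

times = np.array(["2023-01-01", "2023-01-11", "2023-01-21"], dtype="datetime64[ns]")
ds = xr.Dataset(
    {"red": (("time", "y", "x"), np.full((3, 2, 2), 1000, dtype="int16"))},
    coords={"time": times},
)

scale, offset = 0.0001, -0.1     # hypothetical values taken from item metadata
wanted = times[[0, 2]]           # time steps that share this (scale, offset) pair

mask = np.in1d(ds.time, wanted)  # array([ True, False,  True])
scaled = ds[["red"]].isel(time=mask) * scale + offset

print(scaled.time.size)            # 2 -- only the masked time steps remain
print(scaled.red.values[0, 0, 0])  # 0.0  (1000 * 0.0001 - 0.1)
```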