1

I want to calculate vegetation phenology from an NDVI NetCDF dataset. I followed your tutorials (DEA Australia and DEA Africa) to apply the xr_phenology function defined there. To avoid a killed kernel in my Python Jupyter session, I load my daily NDVI dataset (.nc) lazily with Dask using the .chunk argument.

enter image description here

Afterwards, I smooth my daily NDVI dataset with the rolling function:

# Smooth the daily NDVI series with a rolling mean along time.
# min_periods=1 keeps values at the series edges instead of NaN-padding them.
smoothing_window = 4
veg_smooth = ndvi_doy.rolling(time=smoothing_window, min_periods=1).mean()
veg_smooth

enter image description here

To calculate the phenology statistics, I use your code, like this:

# Phenology statistics to compute; 'ROG' and 'ROS' are excluded for now.
pheno_stats = [
    'SOS', 'vSOS', 'POS', 'vPOS',
    'EOS', 'vEOS', 'Trough', 'LOS', 'AOS',
]  # ,'ROG','ROS'

# Suppress the verbose progress output printed by xr_phenology.
with HiddenPrints():
    phen = ts.xr_phenology(
        veg_smooth.ndvi,
        method_sos='first',   # SOS = first day distance to median is most negative
        method_eos='last',    # EOS = last such day in the senescence period
        stats=pheno_stats,
    )

print(phen)

enter image description here

But when I try to display the results with the ".compute()" method, I get the following error and cannot display the results:

# Trigger the lazy Dask computation of the phenology statistics;
# this is the step where the "All-NaN slice encountered" error is raised.
phen=phen.compute()
phen

ValueError: All-NaN slice encountered

To handle the NaN values produced during the calculation of the pheno stats, a function `dea_tools.temporal.allNaN_arg` is defined on your website, but I haven't been able to apply it successfully.

Can you help me with these steps so that the NaN values are ignored and the rest of the results are displayed correctly?

Below is the full detail of this error:

      Phenology...

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
Cell In[11], line 1
----> 1 phen=phen.compute()
      2 phen

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/dataset.py:958, in Dataset.compute(self, **kwargs)
    939 """Manually trigger loading and/or computation of this dataset's data
    940 from disk or a remote source into memory and return a new dataset.
    941 Unlike load, the original dataset is left unaltered.
   (...)
    955 dask.compute
    956 """
    957 new = self.copy(deep=False)
--> 958 return new.load(**kwargs)

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/dataset.py:792, in Dataset.load(self, **kwargs)
    789 chunkmanager = get_chunked_array_type(*lazy_data.values())
    791 # evaluate all the chunked arrays simultaneously
--> 792 evaluated_data = chunkmanager.compute(*lazy_data.values(), **kwargs)
    794 for k, data in zip(lazy_data, evaluated_data):
    795     self.variables[k].data = data

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/daskmanager.py:70, in DaskManager.compute(self, *data, **kwargs)
     67 def compute(self, *data: DaskArray, **kwargs) -> tuple[np.ndarray, ...]:
     68     from dask.array import compute
---> 70     return compute(*data, **kwargs)

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/dask/base.py:628, in compute(traverse, optimize_graph, scheduler, get, *args, **kwargs)
    625     postcomputes.append(x.__dask_postcompute__())
    627 with shorten_traceback():
--> 628     results = schedule(dsk, keys, **kwargs)
    630 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/parallel.py:268, in map_blocks.<locals>._wrapper(func, args, kwargs, arg_is_array, expected)
    258 """
    259 Wrapper function that receives datasets in args; converts to dataarrays when necessary;
    260 passes these to the user function `func` and checks returned objects for expected shapes/sizes/etc.
    261 """
    263 converted_args = [
    264     dataset_to_dataarray(arg) if is_array else arg
    265     for is_array, arg in zip(arg_is_array, args)
    266 ]
--> 268 result = func(*converted_args, **kwargs)
    270 # check all dims are present
    271 missing_dimensions = set(expected["shapes"]) - set(result.sizes)

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/deafrica_tools/temporal.py:350, in xr_phenology(da, stats, method_sos, method_eos, verbose)
    348 trough = _trough(da)
    349 aos = _aos(vpos, trough)
--> 350 vsos = _vsos(da, pos, method_sos=method_sos)
    351 sos = _sos(vsos)
    352 veos = _veos(da, pos, method_eos=method_eos)

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/deafrica_tools/temporal.py:120, in _vsos(da, pos, method_sos)
    116 distance = pos_greenup - median
    118 if method_sos == "first":
    119     # find index (argmin) where distance is most negative
--> 120     idx = allNaN_arg(distance, "time", "min").astype("int16")
    122 if method_sos == "median":
    123     # find index (argmin) where distance is smallest absolute value
    124     idx = allNaN_arg(np.fabs(distance), "time", "min").astype("int16")

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/deafrica_tools/temporal.py:59, in allNaN_arg(da, dim, stat)
     57 if stat == "min":
     58     y = da.fillna(float(da.max() + 1))
---> 59     y = y.argmin(dim=dim, skipna=True).where(~mask)
     60     return y

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/dataarray.py:5969, in DataArray.argmin(self, dim, axis, keep_attrs, skipna)
   5875 def argmin(
   5876     self,
   5877     dim: Dims = None,
   (...)
   5880     skipna: bool | None = None,
   5881 ) -> DataArray | dict[Hashable, DataArray]:
   5882     """Index or indices of the minimum of the DataArray over one or more dimensions.
   5883 
   5884     If a sequence is passed to 'dim', then result returned as dict of DataArrays,
   (...)
   5967     Dimensions without coordinates: y
   5968     """
-> 5969     result = self.variable.argmin(dim, axis, keep_attrs, skipna)
   5970     if isinstance(result, dict):
   5971         return {k: self._replace_maybe_drop_dims(v) for k, v in result.items()}

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/variable.py:2842, in Variable.argmin(self, dim, axis, keep_attrs, skipna)
   2799 def argmin(
   2800     self,
   2801     dim: Dims = None,
   (...)
   2804     skipna: bool | None = None,
   2805 ) -> Variable | dict[Hashable, Variable]:
   2806     """Index or indices of the minimum of the Variable over one or more dimensions.
   2807     If a sequence is passed to 'dim', then result returned as dict of Variables,
   2808     which can be passed directly to isel(). If a single str is passed to 'dim' then
   (...)
   2840     DataArray.argmin, DataArray.idxmin
   2841     """
-> 2842     return self._unravel_argminmax("argmin", dim, axis, keep_attrs, skipna)

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/variable.py:2763, in Variable._unravel_argminmax(self, argminmax, dim, axis, keep_attrs, skipna)
   2754     dim = self.dims
   2755 if (
   2756     dim is None
   2757     or axis is not None
   (...)
   2761     # Return int index if single dimension is passed, and is not part of a
   2762     # sequence
-> 2763     return self.reduce(
   2764         argminmax_func, dim=dim, axis=axis, keep_attrs=keep_attrs, skipna=skipna
   2765     )
   2767 # Get a name for the new dimension that does not conflict with any existing
   2768 # dimension
   2769 newdimname = "_unravel_argminmax_dim_0"

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/variable.py:2012, in Variable.reduce(self, func, dim, axis, keep_attrs, keepdims, **kwargs)
   2008     if isinstance(axis, tuple) and len(axis) == 1:
   2009         # unpack axis for the benefit of functions
   2010         # like np.argmin which can't handle tuple arguments
   2011         axis = axis[0]
-> 2012     data = func(self.data, axis=axis, **kwargs)
   2013 else:
   2014     data = func(self.data, **kwargs)

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/duck_array_ops.py:382, in _create_nan_agg_method.<locals>.f(values, axis, skipna, **kwargs)
    380     with warnings.catch_warnings():
    381         warnings.filterwarnings("ignore", "All-NaN slice encountered")
--> 382         return func(values, axis=axis, **kwargs)
    383 except AttributeError:
    384     if not is_duck_dask_array(values):

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/nanops.py:82, in nanargmin(a, axis)
     79     fill_value = dtypes.get_pos_infinity(a.dtype)
     80     return _nan_argminmax_object("argmin", fill_value, a, axis=axis)
---> 82 return nputils.nanargmin(a, axis=axis)

File ~/anaconda3/envs/anaconda_env/lib/python3.11/site-packages/xarray/core/nputils.py:175, in _create_bottleneck_method.<locals>.f(values, axis, **kwargs)
    163 if (
    164     _USE_BOTTLENECK
    165     and OPTIONS["use_bottleneck"]
   (...)
    172 ):
    173     # bottleneck does not take care dtype, min_count
    174     kwargs.pop("dtype", None)
--> 175     result = bn_func(values, axis=axis, **kwargs)
    176 else:
    177     result = getattr(npmodule, name)(values, axis=axis, **kwargs)

ValueError: All-NaN slice encountered

0