[Help]: Problem opening stack of interferograms #179

Open
anaferreira97 opened this issue Nov 26, 2024 · 1 comment
@anaferreira97
Hi everyone,

I'm trying to open a stack of interferograms (ds_sbas = sbas.open_stack("intf_mlook")) but without success. I'm getting this error:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
Cell In[38], line 3
      1 #ds_sbas = sbas.sync_cube(xr.merge([intf, corr]), 'intf_sbas')
----> 3 ds_sbas = sbas.open_stack("intf_mlook")
      4 ds_sbas = ds_sbas.where(landmask_ra.interp_like(ds_sbas))
      5 intf_sbas = ds_sbas.phase

File ~/.local/lib/python3.11/site-packages/pygmtsar/IO.py:652, in IO.open_stack(self, name, stack)
    649     chunksize = self.chunksize
    650 del data0
--> 652 data = xr.open_mfdataset(
    653     filenames,
    654     engine=self.netcdf_engine,
    655     chunks=chunksize,
    656     parallel=True,
    657     concat_dim='stackvar',
    658     combine='nested'
    659 )
    661 if 'stack' in data.dims:
    662     if 'y' in data.coords and 'x' in data.coords:

File /opt/conda/lib/python3.11/site-packages/xarray/backends/api.py:1040, in open_mfdataset(paths, chunks, concat_dim, compat, preprocess, engine, data_vars, coords, combine, parallel, join, attrs_file, combine_attrs, **kwargs)
   1036 try:
   1037     if combine == "nested":
   1038         # Combined nested list by successive concat and merge operations
   1039         # along each dimension, using structure given by "ids"
-> 1040         combined = _nested_combine(
   1041             datasets,
   1042             concat_dims=concat_dim,
   1043             compat=compat,
   1044             data_vars=data_vars,
   1045             coords=coords,
   1046             ids=ids,
   1047             join=join,
   1048             combine_attrs=combine_attrs,
   1049         )
   1050     elif combine == "by_coords":
   1051         # Redo ordering from coordinates, ignoring how they were ordered
   1052         # previously
   1053         combined = combine_by_coords(
   1054             datasets,
   1055             compat=compat,
   (...)
   1059             combine_attrs=combine_attrs,
   1060         )

File /opt/conda/lib/python3.11/site-packages/xarray/core/combine.py:356, in _nested_combine(datasets, concat_dims, compat, data_vars, coords, ids, fill_value, join, combine_attrs)
    353 _check_shape_tile_ids(combined_ids)
    355 # Apply series of concatenate or merge operations along each dimension
--> 356 combined = _combine_nd(
    357     combined_ids,
    358     concat_dims,
    359     compat=compat,
    360     data_vars=data_vars,
    361     coords=coords,
    362     fill_value=fill_value,
    363     join=join,
    364     combine_attrs=combine_attrs,
    365 )
    366 return combined

File /opt/conda/lib/python3.11/site-packages/xarray/core/combine.py:232, in _combine_nd(combined_ids, concat_dims, data_vars, coords, compat, fill_value, join, combine_attrs)
    228 # Each iteration of this loop reduces the length of the tile_ids tuples
    229 # by one. It always combines along the first dimension, removing the first
    230 # element of the tuple
    231 for concat_dim in concat_dims:
--> 232     combined_ids = _combine_all_along_first_dim(
    233         combined_ids,
    234         dim=concat_dim,
    235         data_vars=data_vars,
    236         coords=coords,
    237         compat=compat,
    238         fill_value=fill_value,
    239         join=join,
    240         combine_attrs=combine_attrs,
    241     )
    242 (combined_ds,) = combined_ids.values()
    243 return combined_ds

File /opt/conda/lib/python3.11/site-packages/xarray/core/combine.py:267, in _combine_all_along_first_dim(combined_ids, dim, data_vars, coords, compat, fill_value, join, combine_attrs)
    265     combined_ids = dict(sorted(group))
    266     datasets = combined_ids.values()
--> 267     new_combined_ids[new_id] = _combine_1d(
    268         datasets, dim, compat, data_vars, coords, fill_value, join, combine_attrs
    269     )
    270 return new_combined_ids

File /opt/conda/lib/python3.11/site-packages/xarray/core/combine.py:290, in _combine_1d(datasets, concat_dim, compat, data_vars, coords, fill_value, join, combine_attrs)
    288 if concat_dim is not None:
    289     try:
--> 290         combined = concat(
    291             datasets,
    292             dim=concat_dim,
    293             data_vars=data_vars,
    294             coords=coords,
    295             compat=compat,
    296             fill_value=fill_value,
    297             join=join,
    298             combine_attrs=combine_attrs,
    299         )
    300     except ValueError as err:
    301         if "encountered unexpected variable" in str(err):

File /opt/conda/lib/python3.11/site-packages/xarray/core/concat.py:252, in concat(objs, dim, data_vars, coords, compat, positions, fill_value, join, combine_attrs)
    240     return _dataarray_concat(
    241         objs,
    242         dim=dim,
   (...)
    249         combine_attrs=combine_attrs,
    250     )
    251 elif isinstance(first_obj, Dataset):
--> 252     return _dataset_concat(
    253         objs,
    254         dim=dim,
    255         data_vars=data_vars,
    256         coords=coords,
    257         compat=compat,
    258         positions=positions,
    259         fill_value=fill_value,
    260         join=join,
    261         combine_attrs=combine_attrs,
    262     )
    263 else:
    264     raise TypeError(
    265         "can only concatenate xarray Dataset and DataArray "
    266         f"objects, got {type(first_obj)}"
    267     )

File /opt/conda/lib/python3.11/site-packages/xarray/core/concat.py:487, in _dataset_concat(datasets, dim, data_vars, coords, compat, positions, fill_value, join, combine_attrs)
    484 # Make sure we're working on a copy (we'll be loading variables)
    485 datasets = [ds.copy() for ds in datasets]
    486 datasets = list(
--> 487     align(*datasets, join=join, copy=False, exclude=[dim], fill_value=fill_value)
    488 )
    490 dim_coords, dims_sizes, coord_names, data_names, vars_order = _parse_datasets(
    491     datasets
    492 )
    493 dim_names = set(dim_coords)

File /opt/conda/lib/python3.11/site-packages/xarray/core/alignment.py:888, in align(join, copy, indexes, exclude, fill_value, *objects)
    692 """
    693 Given any number of Dataset and/or DataArray objects, returns new
    694 objects with aligned indexes and dimension sizes.
   (...)
    878 
    879 """
    880 aligner = Aligner(
    881     objects,
    882     join=join,
   (...)
    886     fill_value=fill_value,
    887 )
--> 888 aligner.align()
    889 return aligner.results

File /opt/conda/lib/python3.11/site-packages/xarray/core/alignment.py:575, in Aligner.align(self)
    573 self.assert_no_index_conflict()
    574 self.align_indexes()
--> 575 self.assert_unindexed_dim_sizes_equal()
    577 if self.join == "override":
    578     self.override_indexes()

File /opt/conda/lib/python3.11/site-packages/xarray/core/alignment.py:476, in Aligner.assert_unindexed_dim_sizes_equal(self)
    474     add_err_msg = ""
    475 if len(sizes) > 1:
--> 476     raise ValueError(
    477         f"cannot reindex or align along dimension {dim!r} "
    478         f"because of conflicting dimension sizes: {sizes!r}" + add_err_msg
    479     )

ValueError: cannot reindex or align along dimension 'x' because of conflicting dimension sizes: {5968, 6080, 5967}

Anyone with the same problem?

@AlexeyPechnikov
Owner

It seems your intf_mlook stack is inconsistent, with rasters for different dates saved in varying sizes. Did you use a single call to sbas.compute_interferogram_multilook() to produce the stack? You can delete the stack using sbas.delete_stack('intf_mlook') and recreate it to ensure all rasters are aligned to the same grid.
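
To see which rasters are off before rebuilding, a minimal sketch along these lines can help. The 'raw/intf_mlook*.grd' pattern is only an assumption about where and how the stack files are stored on disk; adjust it to your layout:

import glob
import xarray as xr

# Hypothetical path pattern: point it at your 'intf_mlook' stack files
for path in sorted(glob.glob('raw/intf_mlook*.grd')):
    with xr.open_dataset(path) as ds:
        # Files whose 'x'/'y' sizes differ from the rest are the culprits
        print(path, dict(ds.sizes))

# Remove the inconsistent stack, then rebuild it with a single call so that
# every raster is produced on the same grid:
sbas.delete_stack('intf_mlook')
# re-run your original sbas.compute_interferogram_multilook(...) call here
ds_sbas = sbas.open_stack('intf_mlook')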
