@inproceedings{hassanpour2025parallel,
  author    = {Hassanpour, S. and Hummert, M. and W{\"u}bben, D. and Dekorsy, A.},
  title     = {Deep Learning-Based Distributed Remote Source Coding via {Information Bottleneck} Method: The Parallel Processing Scheme},
  booktitle = {IEEE International Conference on Communications Workshops (ICC Workshops 2025)},
  year      = {2025},
  month     = jun,
  address   = {Montreal, Canada},
  url       = {https://icc2025.ieee-icc.org/},
  abstract  = {We focus on a generic multiterminal remote source coding scenario, appearing in a variety of real-world applications. Specifically, several noisy observations from a remote user/source signal should be quantized at some intermediate nodes prior to a forward transmission via multiple error-free and rate-limited links to a processing unit. To design the local quantizers, we follow the Information Bottleneck method, and devise a purely data-driven solution, which can be categorized as a Latent Variable Model in the context of generative AI. To that end, we derive a tractable variational lower-bound of the original objective functional, and present the pertinent learning architecture, over which, the design problem can be addressed by the joint training of the encoder DNNs and the decoder DNN, e.g., by some form of the Stochastic Gradient Descent. By several numerical investigations, we further show that this data-driven compression scheme performs (almost) on par with the SotA model-based approach, without requiring the prior knowledge of the (joint) statistics of input signals. This becomes quite important, especially, in those applications where the joint input statistics are either unavailable or hard to estimate.},
}