### 2013

Koch, Tobias; Lapidoth, Amos

At Low SNR, Asymmetric Quantizers are Better Artículo de revista

En: IEEE Transactions on Information Theory, vol. 59, no 9, pp. 5421–5445, 2013, ISSN: 0018-9448.

Resumen | Enlaces | BibTeX | Etiquetas: 1-bit quantizer, asymmetric signaling constellation, asymmetric threshold quantizers, asymptotic power loss, Capacity per unit energy, channel capacity, discrete-time Gaussian channel, flash-signaling input distribution, Gaussian channel, Gaussian channels, low signal-to-noise ratio (SNR), quantisation (signal), quantization, Rayleigh channels, Rayleigh-fading channel, signal-to-noise ratio, SNR, spectral efficiency

@article{Koch2013,
  title     = {At Low {SNR}, Asymmetric Quantizers are Better},
  author    = {Koch, Tobias and Lapidoth, Amos},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6545291},
  issn      = {0018-9448},
  year      = {2013},
  date      = {2013-01-01},
  journal   = {IEEE Transactions on Information Theory},
  volume    = {59},
  number    = {9},
  pages     = {5421--5445},
  abstract  = {We study the capacity of the discrete-time Gaussian channel when its output is quantized with a 1-bit quantizer. We focus on the low signal-to-noise ratio (SNR) regime, where communication at very low spectral efficiencies takes place. In this regime, a symmetric threshold quantizer is known to reduce channel capacity by a factor of $2/\pi$, i.e., to cause an asymptotic power loss of approximately 2 dB. Here, it is shown that this power loss can be avoided by using asymmetric threshold quantizers and asymmetric signaling constellations. To avoid this power loss, flash-signaling input distributions are essential. Consequently, 1-bit output quantization of the Gaussian channel reduces spectral efficiency. Threshold quantizers are not only asymptotically optimal: at every fixed SNR, a threshold quantizer maximizes capacity among all 1-bit output quantizers. The picture changes on the Rayleigh-fading channel. In the noncoherent case, a 1-bit output quantizer causes an unavoidable low-SNR asymptotic power loss. In the coherent case, however, this power loss is avoidable provided that we allow the quantizer to depend on the fading level.},
  keywords  = {1-bit quantizer, asymmetric signaling constellation, asymmetric threshold quantizers, asymptotic power loss, Capacity per unit energy, channel capacity, discrete-time Gaussian channel, flash-signaling input distribution, Gaussian channel, Gaussian channels, low signal-to-noise ratio (SNR), quantisation (signal), quantization, Rayleigh channels, Rayleigh-fading channel, signal-to-noise ratio, SNR, spectral efficiency},
  pubstate  = {published},
  tppubtype = {article}
}

### 2012

Pastore, Adriano; Koch, Tobias; Fonollosa, Javier Rodriguez

Improved Capacity Lower Bounds for Fading Channels with Imperfect CSI Using Rate Splitting Proceedings Article

En: 2012 IEEE 27th Convention of Electrical and Electronics Engineers in Israel, pp. 1–5, IEEE, Eilat, 2012, ISBN: 978-1-4673-4681-8.

Resumen | Enlaces | BibTeX | Etiquetas: channel capacity, channel capacity lower bounds, conditional entropy, Decoding, Entropy, Fading, fading channels, Gaussian channel, Gaussian channels, Gaussian random variable, imperfect channel-state information, imperfect CSI, independent Gaussian variables, linear minimum mean-square error, mean square error methods, Medard lower bound, Mutual information, Random variables, rate splitting approach, Resource management, Upper bound, wireless communications

@inproceedings{Pastore2012,
  title     = {Improved Capacity Lower Bounds for Fading Channels with Imperfect {CSI} Using Rate Splitting},
  author    = {Pastore, Adriano and Koch, Tobias and Rodriguez Fonollosa, Javier},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6377031},
  isbn      = {978-1-4673-4681-8},
  year      = {2012},
  date      = {2012-01-01},
  booktitle = {2012 IEEE 27th Convention of Electrical and Electronics Engineers in Israel},
  pages     = {1--5},
  publisher = {IEEE},
  address   = {Eilat},
  abstract  = {As shown by M{\'e}dard (``The effect upon channel capacity in wireless communications of perfect and imperfect knowledge of the channel,'' IEEE Trans. Inform. Theory, May 2000), the capacity of fading channels with imperfect channel-state information (CSI) can be lower-bounded by assuming a Gaussian channel input X, and by upper-bounding the conditional entropy h(X|Y, {\^H}), conditioned on the channel output Y and the CSI {\^H}, by the entropy of a Gaussian random variable with variance equal to the linear minimum mean-square error in estimating X from (Y, {\^H}). We demonstrate that, by using a rate-splitting approach, this lower bound can be sharpened: we show that by expressing the Gaussian input X as the sum of two independent Gaussian variables X(1) and X(2), and by applying M{\'e}dard's lower bound first to analyze the mutual information between X(1) and Y conditioned on {\^H} while treating X(2) as noise, and by applying the lower bound then to analyze the mutual information between X(2) and Y conditioned on (X(1), {\^H}), we obtain a lower bound on the capacity that is larger than M{\'e}dard's lower bound.},
  keywords  = {channel capacity, channel capacity lower bounds, conditional entropy, Decoding, Entropy, Fading, fading channels, Gaussian channel, Gaussian channels, Gaussian random variable, imperfect channel-state information, imperfect CSI, independent Gaussian variables, linear minimum mean-square error, mean square error methods, Medard lower bound, Mutual information, Random variables, rate splitting approach, Resource management, Upper bound, wireless communications},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Taborda, Camilo G; Perez-Cruz, Fernando

Mutual Information and Relative Entropy over the Binomial and Negative Binomial Channels Proceedings Article

En: 2012 IEEE International Symposium on Information Theory Proceedings, pp. 696–700, IEEE, Cambridge, MA, 2012, ISSN: 2157-8095.

Resumen | Enlaces | BibTeX | Etiquetas: Channel estimation, conditional mean estimation, Entropy, Estimation, estimation theoretical quantity, estimation theory, Gaussian channel, Gaussian channels, information theory concept, loss function, mean square error methods, Mutual information, negative binomial channel, Poisson channel, Random variables, relative entropy

@inproceedings{Taborda2012a,
  title     = {Mutual Information and Relative Entropy over the {Binomial} and {Negative Binomial} Channels},
  author    = {Taborda, Camilo G. and Perez-Cruz, Fernando},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6284304},
  issn      = {2157-8095},
  year      = {2012},
  date      = {2012-01-01},
  booktitle = {2012 IEEE International Symposium on Information Theory Proceedings},
  pages     = {696--700},
  publisher = {IEEE},
  address   = {Cambridge, MA},
  abstract  = {We study the relation of the mutual information and relative entropy over the Binomial and Negative Binomial channels with estimation theoretical quantities, in which we extend already known results for Gaussian and Poisson channels. We establish general expressions for these information theory concepts with a direct connection with estimation theory through the conditional mean estimation and a particular loss function.},
  keywords  = {Channel estimation, conditional mean estimation, Entropy, Estimation, estimation theoretical quantity, estimation theory, Gaussian channel, Gaussian channels, information theory concept, loss function, mean square error methods, Mutual information, negative binomial channel, Poisson channel, Random variables, relative entropy},
  pubstate  = {published},
  tppubtype = {inproceedings}
}