2014
Pastore, A.; Koch, Tobias; Fonollosa, Javier Rodriguez
A Rate-Splitting Approach to Fading Channels With Imperfect Channel-State Information Artículo de revista
En: IEEE Transactions on Information Theory, vol. 60, no 7, pp. 4266–4285, 2014, ISSN: 0018-9448.
Resumen | Enlaces | BibTeX | Etiquetas: channel capacity, COMONSENS, DEIPRO, Entropy, Fading, fading channels, flat fading, imperfect channel-state information, MobileNET, Mutual information, OTOSiS, Random variables, Receivers, Signal to noise ratio, Upper bound
@article{Pastore2014a,
  title         = {A Rate-Splitting Approach to Fading Channels With Imperfect Channel-State Information},
  author        = {Pastore, A. and Koch, Tobias and Fonollosa, Javier Rodriguez},
  url           = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=6832779},
  eprint        = {1301.6120},
  archiveprefix = {arXiv},
  issn          = {0018-9448},
  year          = {2014},
  date          = {2014-01-01},
  journal       = {IEEE Transactions on Information Theory},
  volume        = {60},
  number        = {7},
  pages         = {4266--4285},
  publisher     = {IEEE},
  abstract      = {As shown by M{\'e}dard, the capacity of fading channels with imperfect channel-state information can be lower-bounded by assuming a Gaussian channel input $X$ with power $P$ and by upper-bounding the conditional entropy $h(X|Y,\hat{H})$ by the entropy of a Gaussian random variable with variance equal to the linear minimum mean-square error in estimating $X$ from $(Y,\hat{H})$. We demonstrate that, using a rate-splitting approach, this lower bound can be sharpened: by expressing the Gaussian input $X$ as the sum of two independent Gaussian variables $X_1$ and $X_2$ and by applying M{\'e}dard's lower bound first to bound the mutual information between $X_1$ and $Y$ while treating $X_2$ as noise, and by applying it a second time to the mutual information between $X_2$ and $Y$ while assuming $X_1$ to be known, we obtain a capacity lower bound that is strictly larger than M{\'e}dard's lower bound. We then generalize this approach to an arbitrary number $L$ of layers, where $X$ is expressed as the sum of $L$ independent Gaussian random variables of respective variances $P_\ell$, $\ell = 1,\dotsc,L$ summing up to $P$. Among all such rate-splitting bounds, we determine the supremum over power allocations $P_\ell$ and total number of layers $L$. This supremum is achieved for $L \rightarrow \infty$ and gives rise to an analytically expressible capacity lower bound. For Gaussian fading, this novel bound is shown to converge to the Gaussian-input mutual information as the signal-to-noise ratio (SNR) grows, provided that the variance of the channel estimation error $H-\hat{H}$ tends to zero as the SNR tends to infinity.},
  keywords      = {channel capacity, COMONSENS, DEIPRO, Entropy, Fading, fading channels, flat fading, imperfect channel-state information, MobileNET, Mutual information, OTOSiS, Random variables, Receivers, Signal to noise ratio, Upper bound},
  pubstate      = {published},
  tppubtype     = {article}
}
2010
Koch, Tobias; Lapidoth, Amos
Gaussian Fading Is the Worst Fading Artículo de revista
En: IEEE Transactions on Information Theory, vol. 56, no 3, pp. 1158–1165, 2010, ISSN: 0018-9448.
Resumen | Enlaces | BibTeX | Etiquetas: Additive noise, channel capacity, channels with memory, Distribution functions, ergodic fading processes, Fading, fading channels, flat fading, flat-fading channel capacity, Gaussian channels, Gaussian fading, Gaussian processes, H infinity control, high signal-to-noise ratio (SNR), Information technology, information theory, multiple-input single-output fading channels, multiplexing gain, noncoherent, noncoherent channel capacity, peak-power limited channel capacity, Signal to noise ratio, signal-to-noise ratio, single-antenna channel capacity, spectral distribution function, time-selective, Transmitters
@article{Koch2010a,
  title     = {{Gaussian} Fading Is the Worst Fading},
  author    = {Koch, Tobias and Lapidoth, Amos},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5429105},
  issn      = {0018-9448},
  year      = {2010},
  date      = {2010-01-01},
  journal   = {IEEE Transactions on Information Theory},
  volume    = {56},
  number    = {3},
  pages     = {1158--1165},
  abstract  = {The capacity of peak-power limited, single-antenna, noncoherent, flat-fading channels with memory is considered. The emphasis is on the capacity pre-log, i.e., on the limiting ratio of channel capacity to the logarithm of the signal-to-noise ratio (SNR), as the SNR tends to infinity. It is shown that, among all stationary and ergodic fading processes of a given spectral distribution function and whose law has no mass point at zero, the Gaussian process gives rise to the smallest pre-log. The assumption that the law of the fading process has no mass point at zero is essential in the sense that there exist stationary and ergodic fading processes whose law has a mass point at zero and that give rise to a smaller pre-log than the Gaussian process of equal spectral distribution function. An extension of these results to multiple-input single-output (MISO) fading channels with memory is also presented.},
  keywords  = {Additive noise, channel capacity, channels with memory, Distribution functions, ergodic fading processes, Fading, fading channels, flat fading, flat-fading channel capacity, Gaussian channels, Gaussian fading, Gaussian processes, H infinity control, high signal-to-noise ratio (SNR), Information technology, information theory, multiple-input single-output fading channels, multiplexing gain, noncoherent, noncoherent channel capacity, peak-power limited channel capacity, Signal to noise ratio, signal-to-noise ratio, single-antenna channel capacity, spectral distribution function, time-selective, Transmitters},
  pubstate  = {published},
  tppubtype = {article}
}