### 2014

Koch, Tobias

On the Dither-Quantized Gaussian Channel at Low SNR Artículo en actas

En: 2014 IEEE International Symposium on Information Theory, pp. 186–190, IEEE, Honolulu, 2014, ISBN: 978-1-4799-5186-4.

Resumen | Enlaces | BibTeX | Etiquetas: Additive noise, channel capacity, dither quantized Gaussian channel, Entropy, Gaussian channels, low signal-to-noise-ratio, low-SNR asymptotic capacity, peak power constraint, peak-and-average-power-limited Gaussian channel, Quantization (signal), Signal to noise ratio

@inproceedings{Koch2014,
  title     = {On the Dither-Quantized {Gaussian} Channel at Low {SNR}},
  author    = {Koch, Tobias},
  url       = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=6874820},
  isbn      = {978-1-4799-5186-4},
  year      = {2014},
  date      = {2014-01-01},
  booktitle = {2014 IEEE International Symposium on Information Theory},
  pages     = {186--190},
  publisher = {IEEE},
  address   = {Honolulu},
  abstract  = {We study the capacity of the peak-and-average-power-limited Gaussian channel when its output is quantized using a dithered, infinite-level, uniform quantizer of step size $\Delta$. We focus on the low signal-to-noise-ratio (SNR) regime, where communication at low spectral efficiencies takes place. We show that, when the peak-power constraint is absent, the low-SNR asymptotic capacity is equal to that of the unquantized channel irrespective of $\Delta$. We further derive an expression for the low-SNR asymptotic capacity for finite peak-to-average-power ratios and evaluate it in the low- and high-resolution limit. We demonstrate that, in this case, the low-SNR asymptotic capacity converges to that of the unquantized channel when $\Delta$ tends to zero, and it tends to zero when $\Delta$ tends to infinity.},
  keywords  = {Additive noise, channel capacity, dither quantized Gaussian channel, Entropy, Gaussian channels, low signal-to-noise-ratio, low-SNR asymptotic capacity, peak power constraint, peak-and-average-power-limited Gaussian channel, Quantization (signal), Signal to noise ratio},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2013

Alvarado, Alex; Brännström, Fredrik; Agrell, Erik; Koch, Tobias

High-SNR Asymptotics of Mutual Information for Discrete Constellations Artículo en actas

En: 2013 IEEE International Symposium on Information Theory, pp. 2274–2278, IEEE, Istanbul, 2013, ISSN: 2157-8095.

Resumen | Enlaces | BibTeX | Etiquetas: AWGN channels, discrete constellations, Entropy, Fading, Gaussian Q-function, high-SNR asymptotics, IP networks, least mean squares methods, minimum mean-square error, MMSE, Mutual information, scalar additive white Gaussian noise channel, Signal to noise ratio, signal-to-noise ratio, Upper bound

@inproceedings{Alvarado2013b,
  title     = {High-{SNR} Asymptotics of Mutual Information for Discrete Constellations},
  author    = {Alvarado, Alex and Br{\"a}nnstr{\"o}m, Fredrik and Agrell, Erik and Koch, Tobias},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6620631},
  issn      = {2157-8095},
  year      = {2013},
  date      = {2013-01-01},
  booktitle = {2013 IEEE International Symposium on Information Theory},
  pages     = {2274--2278},
  publisher = {IEEE},
  address   = {Istanbul},
  abstract  = {The asymptotic behavior of the mutual information (MI) at high signal-to-noise ratio (SNR) for discrete constellations over the scalar additive white Gaussian noise channel is studied. Exact asymptotic expressions for the MI for arbitrary one-dimensional constellations and input distributions are presented in the limit as the SNR tends to infinity. Asymptotics of the minimum mean-square error (MMSE) are also developed. It is shown that for any input distribution, the MI and the MMSE have an asymptotic behavior proportional to a Gaussian Q-function, whose argument depends on the minimum Euclidean distance of the constellation and the SNR. Closed-form expressions for the coefficients of these Q-functions are calculated.},
  keywords  = {AWGN channels, discrete constellations, Entropy, Fading, Gaussian Q-function, high-SNR asymptotics, IP networks, least mean squares methods, minimum mean-square error, MMSE, Mutual information, scalar additive white Gaussian noise channel, Signal to noise ratio, signal-to-noise ratio, Upper bound},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2012

Koch, Tobias; Martinez, Alfonso; Guillén i Fàbregas, Albert

The Capacity Loss of Dense Constellations Artículo en actas

En: 2012 IEEE International Symposium on Information Theory Proceedings, pp. 572–576, IEEE, Cambridge, MA, 2012, ISSN: 2157-8095.

Resumen | Enlaces | BibTeX | Etiquetas: capacity loss, channel capacity, Constellation diagram, dense constellations, Entropy, general complex-valued additive-noise channels, high signal-to-noise ratio, loss 1.53 dB, power loss, Quadrature amplitude modulation, Random variables, signal constellations, Signal processing, Signal to noise ratio, square signal constellations, Upper bound

@inproceedings{Koch2012,
  title     = {The Capacity Loss of Dense Constellations},
  author    = {Koch, Tobias and Martinez, Alfonso and Guill{\'e}n i F{\`a}bregas, Albert},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6283482},
  issn      = {2157-8095},
  year      = {2012},
  date      = {2012-01-01},
  booktitle = {2012 IEEE International Symposium on Information Theory Proceedings},
  pages     = {572--576},
  publisher = {IEEE},
  address   = {Cambridge, MA},
  abstract  = {We determine the loss in capacity incurred by using signal constellations with a bounded support over general complex-valued additive-noise channels for suitably high signal-to-noise ratio. Our expression for the capacity loss recovers the power loss of 1.53 dB for square signal constellations.},
  keywords  = {capacity loss, channel capacity, Constellation diagram, dense constellations, Entropy, general complex-valued additive-noise channels, high signal-to-noise ratio, loss 1.53 dB, power loss, Quadrature amplitude modulation, Random variables, signal constellations, Signal processing, Signal to noise ratio, square signal constellations, Upper bound},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Taborda, Camilo G; Perez-Cruz, Fernando

Derivative of the Relative Entropy over the Poisson and Binomial Channel Artículo en actas

En: 2012 IEEE Information Theory Workshop, pp. 386–390, IEEE, Lausanne, 2012, ISBN: 978-1-4673-0223-4.

Resumen | Enlaces | BibTeX | Etiquetas: binomial channel, binomial distribution, Channel estimation, conditional distribution, Entropy, Estimation, function expectation, Mutual information, mutual information concept, Poisson channel, Poisson distribution, Random variables, relative entropy derivative, similar expression

@inproceedings{Taborda2012,
  title     = {Derivative of the Relative Entropy over the {Poisson} and {Binomial} Channel},
  author    = {Taborda, Camilo G. and P{\'e}rez-Cruz, Fernando},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6404699},
  isbn      = {978-1-4673-0223-4},
  year      = {2012},
  date      = {2012-01-01},
  booktitle = {2012 IEEE Information Theory Workshop},
  pages     = {386--390},
  publisher = {IEEE},
  address   = {Lausanne},
  abstract  = {In this paper it is found that, regardless of the statistics of the input, the derivative of the relative entropy over the Binomial channel can be seen as the expectation of a function that has as argument the mean of the conditional distribution that models the channel. Based on this relationship we formulate a similar expression for the mutual information concept. In addition to this, using the connection between the Binomial and Poisson distribution we develop similar results for the Poisson channel. Novelty of the results presented here lies on the fact that, expressions obtained can be applied to a wide range of scenarios.},
  keywords  = {binomial channel, binomial distribution, Channel estimation, conditional distribution, Entropy, Estimation, function expectation, Mutual information, mutual information concept, Poisson channel, Poisson distribution, Random variables, relative entropy derivative, similar expression},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Pastore, Adriano; Koch, Tobias; Rodríguez Fonollosa, Javier

Improved Capacity Lower Bounds for Fading Channels with Imperfect CSI Using Rate Splitting Artículo en actas

En: 2012 IEEE 27th Convention of Electrical and Electronics Engineers in Israel, pp. 1–5, IEEE, Eilat, 2012, ISBN: 978-1-4673-4681-8.

Resumen | Enlaces | BibTeX | Etiquetas: channel capacity, channel capacity lower bounds, conditional entropy, Decoding, Entropy, Fading, fading channels, Gaussian channel, Gaussian channels, Gaussian random variable, imperfect channel-state information, imperfect CSI, independent Gaussian variables, linear minimum mean-square error, mean square error methods, Medard lower bound, Mutual information, Random variables, rate splitting approach, Resource management, Upper bound, wireless communications

@inproceedings{Pastore2012,
  title     = {Improved Capacity Lower Bounds for Fading Channels with Imperfect {CSI} Using Rate Splitting},
  author    = {Pastore, Adriano and Koch, Tobias and Rodr{\'i}guez Fonollosa, Javier},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6377031},
  isbn      = {978-1-4673-4681-8},
  year      = {2012},
  date      = {2012-01-01},
  booktitle = {2012 IEEE 27th Convention of Electrical and Electronics Engineers in Israel},
  pages     = {1--5},
  publisher = {IEEE},
  address   = {Eilat},
  abstract  = {As shown by Medard (“The effect upon channel capacity in wireless communications of perfect and imperfect knowledge of the channel,” IEEE Trans. Inform. Theory, May 2000), the capacity of fading channels with imperfect channel-state information (CSI) can be lower-bounded by assuming a Gaussian channel input X, and by upper-bounding the conditional entropy h(X|Y, Ĥ), conditioned on the channel output Y and the CSI Ĥ, by the entropy of a Gaussian random variable with variance equal to the linear minimum mean-square error in estimating X from (Y, Ĥ). We demonstrate that, by using a rate-splitting approach, this lower bound can be sharpened: we show that by expressing the Gaussian input X as the sum of two independent Gaussian variables X(1) and X(2), and by applying Medard's lower bound first to analyze the mutual information between X(1) and Y conditioned on Ĥ while treating X(2) as noise, and by applying the lower bound then to analyze the mutual information between X(2) and Y conditioned on (X(1), Ĥ), we obtain a lower bound on the capacity that is larger than Medard's lower bound.},
  keywords  = {channel capacity, channel capacity lower bounds, conditional entropy, Decoding, Entropy, Fading, fading channels, Gaussian channel, Gaussian channels, Gaussian random variable, imperfect channel-state information, imperfect CSI, independent Gaussian variables, linear minimum mean-square error, mean square error methods, Medard lower bound, Mutual information, Random variables, rate splitting approach, Resource management, Upper bound, wireless communications},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Taborda, Camilo G; Perez-Cruz, Fernando

Mutual Information and Relative Entropy over the Binomial and Negative Binomial Channels Artículo en actas

En: 2012 IEEE International Symposium on Information Theory Proceedings, pp. 696–700, IEEE, Cambridge, MA, 2012, ISSN: 2157-8095.

Resumen | Enlaces | BibTeX | Etiquetas: Channel estimation, conditional mean estimation, Entropy, Estimation, estimation theoretical quantity, estimation theory, Gaussian channel, Gaussian channels, information theory concept, loss function, mean square error methods, Mutual information, negative binomial channel, Poisson channel, Random variables, relative entropy

@inproceedings{Taborda2012a,
  title     = {Mutual Information and Relative Entropy over the {Binomial} and {Negative Binomial} Channels},
  author    = {Taborda, Camilo G. and P{\'e}rez-Cruz, Fernando},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=6284304},
  issn      = {2157-8095},
  year      = {2012},
  date      = {2012-01-01},
  booktitle = {2012 IEEE International Symposium on Information Theory Proceedings},
  pages     = {696--700},
  publisher = {IEEE},
  address   = {Cambridge, MA},
  abstract  = {We study the relation of the mutual information and relative entropy over the Binomial and Negative Binomial channels with estimation theoretical quantities, in which we extend already known results for Gaussian and Poisson channels. We establish general expressions for these information theory concepts with a direct connection with estimation theory through the conditional mean estimation and a particular loss function.},
  keywords  = {Channel estimation, conditional mean estimation, Entropy, Estimation, estimation theoretical quantity, estimation theory, Gaussian channel, Gaussian channels, information theory concept, loss function, mean square error methods, Mutual information, negative binomial channel, Poisson channel, Random variables, relative entropy},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2009

Fresia, Maria; Pérez-Cruz, Fernando; Poor, H. Vincent

Optimized Concatenated LDPC Codes for Joint Source-Channel Coding Artículo en actas

En: 2009 IEEE International Symposium on Information Theory, pp. 2131–2135, IEEE, Seoul, 2009, ISBN: 978-1-4244-4312-3.

Resumen | Enlaces | BibTeX | Etiquetas: approximation theory, asymptotic behavior analysis, Channel Coding, combined source-channel coding, Concatenated codes, Decoding, Entropy, EXIT chart, extrinsic information transfer, H infinity control, Information analysis, joint belief propagation decoder, joint source-channel coding, low-density-parity-check code, optimized concatenated independent LDPC codes, parity check codes, Redundancy, source coding, transmitter, Transmitters

@inproceedings{Fresia2009,
  title     = {Optimized Concatenated {LDPC} Codes for Joint Source-Channel Coding},
  author    = {Fresia, Maria and P{\'e}rez-Cruz, Fernando and Poor, H. Vincent},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=5205766},
  isbn      = {978-1-4244-4312-3},
  year      = {2009},
  date      = {2009-01-01},
  booktitle = {2009 IEEE International Symposium on Information Theory},
  pages     = {2131--2135},
  publisher = {IEEE},
  address   = {Seoul},
  abstract  = {In this paper a scheme for joint source-channel coding based on low-density-parity-check (LDPC) codes is investigated. Two concatenated independent LDPC codes are used in the transmitter: one for source coding and the other for channel coding, with a joint belief propagation decoder. The asymptotic behavior is analyzed using EXtrinsic Information Transfer (EXIT) charts and this approximation is corroborated with illustrative experiments. The optimization of the degree distributions for our sparse code to maximize the information transmission rate is also considered.},
  keywords  = {approximation theory, asymptotic behavior analysis, Channel Coding, combined source-channel coding, Concatenated codes, Decoding, Entropy, EXIT chart, extrinsic information transfer, H infinity control, Information analysis, joint belief propagation decoder, joint source-channel coding, low-density-parity-check code, optimized concatenated independent LDPC codes, parity check codes, Redundancy, source coding, transmitter, Transmitters},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

### 2008

Koch, Tobias; Lapidoth, Amos

On Multipath Fading Channels at High SNR Artículo en actas

En: 2008 IEEE International Symposium on Information Theory, pp. 1572–1576, IEEE, Toronto, 2008, ISBN: 978-1-4244-2256-2.

Resumen | Enlaces | BibTeX | Etiquetas: channel capacity, Delay, discrete time systems, discrete-time channels, Entropy, Fading, fading channels, Frequency, Mathematical model, multipath channels, multipath fading channels, noncoherent channel model, Random variables, Signal to noise ratio, signal-to-noise ratios, SNR, statistics, Transmitters

@inproceedings{Koch2008,
  title     = {On Multipath Fading Channels at High {SNR}},
  author    = {Koch, Tobias and Lapidoth, Amos},
  url       = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=4595252},
  isbn      = {978-1-4244-2256-2},
  year      = {2008},
  date      = {2008-01-01},
  booktitle = {2008 IEEE International Symposium on Information Theory},
  pages     = {1572--1576},
  publisher = {IEEE},
  address   = {Toronto},
  abstract  = {This paper studies the capacity of discrete-time multipath fading channels. It is assumed that the number of paths is finite, i.e., that the channel output is influenced by the present and by the L previous channel inputs. A noncoherent channel model is considered where neither transmitter nor receiver are cognizant of the fading's realization, but both are aware of its statistic. The focus is on capacity at high signal-to-noise ratios (SNR). In particular, the capacity pre-loglog-defined as the limiting ratio of the capacity to loglog(SNR) as SNR tends to infinity-is studied. It is shown that, irrespective of the number of paths L, the capacity pre-loglog is 1.},
  keywords  = {channel capacity, Delay, discrete time systems, discrete-time channels, Entropy, Fading, fading channels, Frequency, Mathematical model, multipath channels, multipath fading channels, noncoherent channel model, Random variables, Signal to noise ratio, signal-to-noise ratios, SNR, statistics, Transmitters},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Perez-Cruz, Fernando

Kullback-Leibler Divergence Estimation of Continuous Distributions Artículo en actas

En: 2008 IEEE International Symposium on Information Theory, pp. 1666–1670, IEEE, Toronto, 2008, ISBN: 978-1-4244-2256-2.

Resumen | Enlaces | BibTeX | Etiquetas: Convergence, density estimation, Density measurement, Entropy, Frequency estimation, H infinity control, information theory, k-nearest-neighbour density estimation, Kullback-Leibler divergence estimation, Machine learning, Mutual information, neuroscience, Random variables, statistical distributions, waiting-times distributions

@inproceedings{Perez-Cruz2008,
  title     = {{Kullback-Leibler} Divergence Estimation of Continuous Distributions},
  author    = {P{\'e}rez-Cruz, Fernando},
  url       = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4595271},
  isbn      = {978-1-4244-2256-2},
  year      = {2008},
  date      = {2008-01-01},
  booktitle = {2008 IEEE International Symposium on Information Theory},
  pages     = {1666--1670},
  publisher = {IEEE},
  address   = {Toronto},
  abstract  = {We present a method for estimating the KL divergence between continuous densities and we prove it converges almost surely. Divergence estimation is typically solved estimating the densities first. Our main result shows this intermediate step is unnecessary and that the divergence can be either estimated using the empirical cdf or k-nearest-neighbour density estimation, which does not converge to the true measure for finite k. The convergence proof is based on describing the statistics of our estimator using waiting-times distributions, as the exponential or Erlang. We illustrate the proposed estimators and show how they compare to existing methods based on density estimation, and we also outline how our divergence estimators can be used for solving the two-sample problem.},
  keywords  = {Convergence, density estimation, Density measurement, Entropy, Frequency estimation, H infinity control, information theory, k-nearest-neighbour density estimation, Kullback-Leibler divergence estimation, Machine learning, Mutual information, neuroscience, Random variables, statistical distributions, waiting-times distributions},
  pubstate  = {published},
  tppubtype = {inproceedings}
}

Koch, Tobias; Lapidoth, Amos

Multipath Channels of Unbounded Capacity Artículo en actas

En: 2008 IEEE 25th Convention of Electrical and Electronics Engineers in Israel, pp. 640–644, IEEE, Eilat, 2008, ISBN: 978-1-4244-2481-8.

Resumen | Enlaces | BibTeX | Etiquetas: channel capacity, discrete-time capacity, Entropy, Fading, fading channels, Frequency, H infinity control, Information rates, multipath channels, multipath fading channels, noncoherent, noncoherent capacity, path gains decay, Signal to noise ratio, statistics, Transmitters, unbounded capacity

@inproceedings{Koch2008b,
  title     = {Multipath Channels of Unbounded Capacity},
  author    = {Koch, Tobias and Lapidoth, Amos},
  url       = {http://ieeexplore.ieee.org/articleDetails.jsp?arnumber=4736611},
  isbn      = {978-1-4244-2481-8},
  year      = {2008},
  date      = {2008-01-01},
  booktitle = {2008 IEEE 25th Convention of Electrical and Electronics Engineers in Israel},
  pages     = {640--644},
  publisher = {IEEE},
  address   = {Eilat},
  abstract  = {The capacity of discrete-time, noncoherent, multipath fading channels is considered. It is shown that if the variances of the path gains decay faster than exponentially, then capacity is unbounded in the transmit power.},
  keywords  = {channel capacity, discrete-time capacity, Entropy, Fading, fading channels, Frequency, H infinity control, Information rates, multipath channels, multipath fading channels, noncoherent, noncoherent capacity, path gains decay, Signal to noise ratio, statistics, Transmitters, unbounded capacity},
  pubstate  = {published},
  tppubtype = {inproceedings}
}