@article{Dong:200187,
  author    = {Dong, Xiaowen and Thanou, Dorina and Frossard, Pascal and Vandergheynst, Pierre},
  title     = {Learning {Laplacian} Matrix in Smooth Graph Signal Representations},
  journal   = {IEEE Transactions on Signal Processing},
  volume    = {64},
  number    = {23},
  pages     = {6160--6173},
  year      = {2016},
  publisher = {Institute of Electrical and Electronics Engineers},
  address   = {Piscataway},
  abstract  = {The construction of a meaningful graph plays a crucial role in the success of many graph-based representations and algorithms for handling structured data, especially in the emerging field of graph signal processing. However, a meaningful graph is not always readily available from the data, nor easy to define depending on the application domain. In particular, it is often desirable in graph signal processing applications that a graph is chosen such that the data admit certain regularity or smoothness on the graph. In this paper, we address the problem of learning graph Laplacians, which is equivalent to learning graph topologies, such that the input data form graph signals with smooth variations on the resulting topology. To this end, we adopt a factor analysis model for the graph signals and impose a Gaussian probabilistic prior on the latent variables that control these signals. We show that the Gaussian prior leads to an efficient representation that favors the smoothness property of the graph signals. We then propose an algorithm for learning graphs that enforce such property and is based on minimizing the variations of the signals on the learned graph. Experiments on both synthetic and real world data demonstrate that the proposed graph learning framework can lead to efficiently inferring meaningful graph topologies from signal observations under the smoothness prior.},
  url       = {http://infoscience.epfl.ch/record/200187},
  doi       = {10.1109/TSP.2016.2602809},
}