@inproceedings{7856ea5897614162b6be555431dd9782,
title = "Variational Networks: Connecting Variational Methods and Deep Learning",
abstract = "In this paper, we introduce variational networks (VNs) for image reconstruction. VNs are fully learned models based on the framework of incremental proximal gradient methods. They provide a natural transition between classical variational methods and state-of-the-art residual neural networks. Due to their incremental nature, VNs are very efficient, but only approximately minimize the underlying variational model. Surprisingly, in our numerical experiments on image reconstruction problems, it turns out that giving up exact minimization leads to a consistent performance increase, in particular in the case of convex models.",
keywords = "variational methods, machine learning",
author = "Erich Kobler and Teresa Klatzer and Kerstin Hammernik and Thomas Pock",
year = "2017",
doi = "10.1007/978-3-319-66709-6_23",
language = "English",
isbn = "978-3-319-66708-9",
series = "Lecture Notes in Computer Science",
publisher = "Springer",
pages = "281--293",
booktitle = "Pattern Recognition",
}