%Aigaion2 BibTeX export from Idiap Publications
%Sunday 22 December 2024 07:02:14 AM

@INPROCEEDINGS{Chavdarova_CVPR_2018,
         author = {Chavdarova, Tatjana and Fleuret, Fran{\c{c}}ois},
       projects = {Idiap},
          title = {{SGAN}: An Alternative Training of {Generative Adversarial Networks}},
      booktitle = {Proceedings of the {IEEE} Conference on Computer Vision and Pattern Recognition},
           year = {2018},
          pages = {9407--9415},
      publisher = {IEEE},
       location = {Salt Lake City, UT},
           issn = {1063-6919},
           isbn = {978-1-5386-6420-9},
            doi = {10.1109/CVPR.2018.00980},
       abstract = {The Generative Adversarial Networks (GANs) have demonstrated impressive performance for data synthesis, and are now used in a wide range of computer vision tasks. In spite of this success, they gained a reputation for being difficult to train, what results in a time-consuming and human-involved development process to use them.
We consider an alternative training process, named SGAN, in which several adversarial "local" pairs of networks are trained independently so that a "global" supervising pair of networks can be trained against them. The goal is to train the global pair with the corresponding ensemble opponent for improved performances in terms of mode coverage. This approach aims at increasing the chances that learning will not stop for the global pair, preventing both to be trapped in an unsatisfactory local minimum, or to face oscillations often observed in practice. To guarantee the latter; the global pair never affects the local ones.
The rules of SGAN training are thus as follows: the global generator and discriminator are trained using the local discriminators and generators, respectively, whereas the local networks are trained with their fixed local opponent.
Experimental results on both toy and real-world problems demonstrate that this approach outperforms standard training in terms of better mitigating mode collapse, stability while converging and that it surprisingly, increases the convergence speed as well.}
}