%Aigaion2 BibTeX export from Idiap Publications
%Saturday 21 December 2024 07:32:35 PM

@PHDTHESIS{Canevet_THESIS_2017,
         author = {Can{\'{e}}vet, Olivier},
       keywords = {bootstrapping, computer vision, image classification, importance sampling, machine learning, Monte Carlo tree search, object detection},
       projects = {Idiap, DASH},
          month = feb,
          title = {Object Detection with Active Sample Harvesting},
           year = {2017},
         school = {{\'{E}}cole Polytechnique F{\'{e}}d{\'{e}}rale de Lausanne},
       abstract = {The work presented in this dissertation lies in the domains of image
classification, object detection, and machine learning. Whether
training image classifiers or object detectors, the learning phase
consists of finding an optimal boundary between populations of
samples. In practice, not all samples are equally important: some
examples are trivially classified and contribute little to training,
while others, close to the boundary or misclassified, are the ones
that truly matter. Similarly, the images from which the samples
originate are not all equally rich in informative samples. Yet most
training procedures select samples and images uniformly or weight
them equally.

The common thread of this dissertation is how to efficiently find the
informative samples/images for training. Although we never consider
all the possible samples "in the world", our purpose is to select
samples in a smarter manner, without examining all the available
ones.

The framework adopted in this work consists of organising the data
(samples or images) into a tree that reflects the statistical
regularities of the training samples, by placing "similar" samples in
the same branch. Each leaf carries a sample and a weight related to
the "importance" of the corresponding sample, and each internal node
carries statistics about the weights below it. The tree is used to
select the next sample/image for training by applying a sampling
policy, and the "importance" weights are updated accordingly to bias
the sampling towards informative samples/images in future iterations.

Our experiments show that, across the various applications, properly
focusing on informative images or samples improves the learning
phase, either by reaching better performance faster or by reducing
the training loss faster.},
            pdf = {https://publications.idiap.ch/attachments/papers/2017/Canevet_THESIS_2017.pdf}
}
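
% The abstract above describes the sample-harvesting framework only at a high
% level: leaves carry samples and "importance" weights, internal nodes carry
% aggregate statistics, and a sampling policy picks the next sample to train on.
% The sketch below is one minimal, hypothetical way to realise such a structure
% in Python (a sum-tree with weight-proportional sampling). The SumTree class,
% the loss-based weight update, and the placeholder sample names are
% illustrative assumptions, not the exact policy used in the thesis.

import random

class SumTree:
    """Binary tree over samples: each leaf holds one sample's weight and
    each internal node holds the sum of the weights in its subtree."""

    def __init__(self, samples, init_weight=1.0):
        self.n = len(samples)
        self.samples = list(samples)
        # Flat heap layout: internal nodes in [0, n-1), leaves in [n-1, 2n-1).
        self.tree = [0.0] * (2 * self.n - 1)
        for i in range(self.n):
            self.update(i, init_weight)

    def update(self, idx, weight):
        """Set the weight of leaf `idx` and propagate the change to the root."""
        pos = idx + self.n - 1
        delta = weight - self.tree[pos]
        while True:
            self.tree[pos] += delta
            if pos == 0:
                break
            pos = (pos - 1) // 2

    def sample(self):
        """Draw a leaf with probability proportional to its weight."""
        r = random.uniform(0.0, self.tree[0])
        pos = 0
        while pos < self.n - 1:              # descend until a leaf is reached
            left = 2 * pos + 1
            if r <= self.tree[left]:
                pos = left
            else:
                r -= self.tree[left]
                pos = left + 1
        idx = pos - (self.n - 1)
        return idx, self.samples[idx], self.tree[pos]

# Hypothetical usage: harvest samples in proportion to their current
# "importance", then refresh the weight from the observed training loss so
# that informative samples are revisited more often.
tree = SumTree(samples=["img_0", "img_1", "img_2", "img_3"])
for _ in range(10):
    idx, sample, weight = tree.sample()
    loss = 0.5                               # placeholder for the model's loss
    tree.update(idx, loss + 1e-3)            # keep weights strictly positive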