data.tar.gz | 29.89 GB
Type: Paper
Tags:
Bibtex:
@article{boudiaf2020information,
  title     = {Information Maximization for Few-Shot Learning},
  author    = {Boudiaf, Malik and Ziko, Imtiaz and Rony, Jerome and Dolz, Jose and Piantanida, Pablo and Ben Ayed, Ismail},
  journal   = {Advances in Neural Information Processing Systems},
  volume    = {33},
  pages     = {2445--2457},
  year      = {2020},
  abstract  = {We introduce Transductive Information Maximization (TIM) for few-shot learning. Our method maximizes the mutual information between the query features and their label predictions for a given few-shot task, in conjunction with a supervision loss based on the support set. Furthermore, we propose a new alternating-direction solver for our mutual-information loss, which substantially speeds up transductive-inference convergence over gradient-based optimization, while yielding similar accuracy. TIM inference is modular: it can be used on top of any base-training feature extractor. Following standard transductive few-shot settings, our comprehensive experiments demonstrate that TIM outperforms state-of-the-art methods significantly across various datasets and networks, while used on top of a fixed feature extractor trained with simple cross-entropy on the base classes, without resorting to complex meta-learning schemes. It consistently brings between 2% and 5% improvement in accuracy over the best performing method, not only on all the well-established few-shot benchmarks but also on more challenging scenarios, with domain shifts and larger numbers of classes.},
  keywords  = {},
  terms     = {},
  license   = {},
  superseded= {},
  url       = {}
}
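To make the abstract's description concrete, below is a minimal sketch of a TIM-style objective as described there: a cross-entropy supervision term on the support set combined with a mutual-information term on the query predictions (marginal entropy minus average conditional entropy). This is an illustrative reconstruction from the abstract only, not the authors' released code; the function name, tensor shapes, the linear softmax classifier, and the weighting factor `alpha` are assumptions.

```python
import torch
import torch.nn.functional as F

def tim_style_loss(support_feats, support_labels, query_feats, weights, alpha=1.0):
    """Hypothetical sketch of a TIM-style objective (not the official implementation).

    support_feats:  (n_s, d) features of the labelled support set
    support_labels: (n_s,)   integer class labels
    query_feats:    (n_q, d) features of the unlabelled query set
    weights:        (num_classes, d) linear classifier weights being optimized
    """
    # Softmax predictions from a simple linear classifier on top of frozen features.
    support_logits = support_feats @ weights.t()
    query_probs = F.softmax(query_feats @ weights.t(), dim=1)

    # Supervision term: cross-entropy on the support set.
    ce = F.cross_entropy(support_logits, support_labels)

    # Conditional entropy H(Y|X): mean entropy of per-query predictions (to be minimized).
    cond_ent = -(query_probs * torch.log(query_probs + 1e-12)).sum(dim=1).mean()

    # Marginal entropy H(Y): entropy of the average prediction (to be maximized).
    marginal = query_probs.mean(dim=0)
    marg_ent = -(marginal * torch.log(marginal + 1e-12)).sum()

    # Maximizing mutual information = maximizing H(Y) - H(Y|X); minimize the negative.
    return ce + alpha * (cond_ent - marg_ent)
```

The paper also proposes an alternating-direction solver for this loss; the gradient-friendly form above is only meant to illustrate the terms named in the abstract.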