@article{lmr,
title= {LMR: A Large-Scale Multi-Reference Dataset for Reference-based Super-Resolution},
journal= {},
author= {},
year= {},
url= {https://hkustconnect-my.sharepoint.com/:f:/g/personal/yhuangdl_connect_ust_hk/EoEEiF3R611HsECZcWfbtdEBW4y-IAbzdEhOatEzazolZw?e=5hIA9Q},
abstract= {It is widely agreed that reference-based super-resolution (RefSR) achieves superior results compared to single image super-resolution (SISR) by referring to similar high-quality images. Intuitively, the more references, the better the performance. However, previous RefSR methods have all focused on single-reference training, while multiple reference images are often available in testing or practical applications. The root cause of this training-testing mismatch is the absence of publicly available multi-reference SR training datasets, which greatly hinders research efforts on multi-reference super-resolution. To this end, we construct a large-scale multi-reference super-resolution dataset, named LMR. It contains 112,142 groups of 300x300 training images, which is 10x the number of groups in the largest existing RefSR dataset, and the images are also much larger. More importantly, each group is equipped with five reference images at different similarity levels. Furthermore, we propose a new baseline method for multi-reference super-resolution, MRefSR, comprising a Multi-Reference Attention Module (MAM) for feature fusion over an arbitrary number of reference images and a Spatial Aware Filtering Module (SAFM) for selection of the fused features. The proposed MRefSR achieves significant improvements over state-of-the-art approaches in both quantitative and qualitative evaluations.},
keywords= {},
terms= {},
license= {},
superseded= {}
}