{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,1,21]],"date-time":"2026-01-21T19:19:28Z","timestamp":1769023168698,"version":"3.49.0"},"reference-count":73,"publisher":"MDPI AG","issue":"7","license":[{"start":{"date-parts":[[2024,6,26]],"date-time":"2024-06-26T00:00:00Z","timestamp":1719360000000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/creativecommons.org\/licenses\/by\/4.0\/"}],"funder":[{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62101481"],"award-info":[{"award-number":["62101481"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["62261060"],"award-info":[{"award-number":["62261060"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["12202377"],"award-info":[{"award-number":["12202377"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["202201AT070112"],"award-info":[{"award-number":["202201AT070112"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["202301AW070007"],"award-info":[{"award-number":["202301AW070007"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of 
China","doi-asserted-by":"publisher","award":["202201AU070033"],"award-info":[{"award-number":["202201AU070033"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["202301AU070210"],"award-info":[{"award-number":["202301AU070210"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["202005AC160007"],"award-info":[{"award-number":["202005AC160007"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["202301AT070407"],"award-info":[{"award-number":["202301AT070407"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["202302AD080006"],"award-info":[{"award-number":["202302AD080006"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["202202AD080002"],"award-info":[{"award-number":["202202AD080002"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["202305AF150078"],"award-info":[{"award-number":["202305AF150078"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of 
China","doi-asserted-by":"publisher","award":["C619300A020"],"award-info":[{"award-number":["C619300A020"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["KC-22221218"],"award-info":[{"award-number":["KC-22221218"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100001809","name":"National Natural Science Foundation of China","doi-asserted-by":"publisher","award":["TM-23236845"],"award-info":[{"award-number":["TM-23236845"]}],"id":[{"id":"10.13039\/501100001809","id-type":"DOI","asserted-by":"publisher"}]},{"name":"Yunnan Fundamental Research Projects","award":["62101481"],"award-info":[{"award-number":["62101481"]}]},{"name":"Yunnan Fundamental Research Projects","award":["62261060"],"award-info":[{"award-number":["62261060"]}]},{"name":"Yunnan Fundamental Research Projects","award":["12202377"],"award-info":[{"award-number":["12202377"]}]},{"name":"Yunnan Fundamental Research Projects","award":["202201AT070112"],"award-info":[{"award-number":["202201AT070112"]}]},{"name":"Yunnan Fundamental Research Projects","award":["202301AW070007"],"award-info":[{"award-number":["202301AW070007"]}]},{"name":"Yunnan Fundamental Research Projects","award":["202201AU070033"],"award-info":[{"award-number":["202201AU070033"]}]},{"name":"Yunnan Fundamental Research Projects","award":["202301AU070210"],"award-info":[{"award-number":["202301AU070210"]}]},{"name":"Yunnan Fundamental Research Projects","award":["202005AC160007"],"award-info":[{"award-number":["202005AC160007"]}]},{"name":"Yunnan Fundamental Research Projects","award":["202301AT070407"],"award-info":[{"award-number":["202301AT070407"]}]},{"name":"Yunnan Fundamental Research Projects","award":["202302AD080006"],"award-info":[{"award-number":["202302AD080006"]}]},{"name":"Yunnan Fundamental Research 
Projects","award":["202202AD080002"],"award-info":[{"award-number":["202202AD080002"]}]},{"name":"Yunnan Fundamental Research Projects","award":["202305AF150078"],"award-info":[{"award-number":["202305AF150078"]}]},{"name":"Yunnan Fundamental Research Projects","award":["C619300A020"],"award-info":[{"award-number":["C619300A020"]}]},{"name":"Yunnan Fundamental Research Projects","award":["KC-22221218"],"award-info":[{"award-number":["KC-22221218"]}]},{"name":"Yunnan Fundamental Research Projects","award":["TM-23236845"],"award-info":[{"award-number":["TM-23236845"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["62101481"],"award-info":[{"award-number":["62101481"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["62261060"],"award-info":[{"award-number":["62261060"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["12202377"],"award-info":[{"award-number":["12202377"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["202201AT070112"],"award-info":[{"award-number":["202201AT070112"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["202301AW070007"],"award-info":[{"award-number":["202301AW070007"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["202201AU070033"],"award-info":[{"award-number":["202201AU070033"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["202301AU070210"],"award-info":[{"award-number":["202301AU070210"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["202005AC160007"],"award-info":[{"award-number":["202005AC160007"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["202301AT070407"],"award-info":[{"award-number":["202301AT070407"]}]},{"name":"Major Scientific and Technological Project of Yunnan 
Province","award":["202302AD080006"],"award-info":[{"award-number":["202302AD080006"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["202202AD080002"],"award-info":[{"award-number":["202202AD080002"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["202305AF150078"],"award-info":[{"award-number":["202305AF150078"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["C619300A020"],"award-info":[{"award-number":["C619300A020"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["KC-22221218"],"award-info":[{"award-number":["KC-22221218"]}]},{"name":"Major Scientific and Technological Project of Yunnan Province","award":["TM-23236845"],"award-info":[{"award-number":["TM-23236845"]}]},{"name":"Yunnan Province Expert Workstations","award":["62101481"],"award-info":[{"award-number":["62101481"]}]},{"name":"Yunnan Province Expert Workstations","award":["62261060"],"award-info":[{"award-number":["62261060"]}]},{"name":"Yunnan Province Expert Workstations","award":["12202377"],"award-info":[{"award-number":["12202377"]}]},{"name":"Yunnan Province Expert Workstations","award":["202201AT070112"],"award-info":[{"award-number":["202201AT070112"]}]},{"name":"Yunnan Province Expert Workstations","award":["202301AW070007"],"award-info":[{"award-number":["202301AW070007"]}]},{"name":"Yunnan Province Expert Workstations","award":["202201AU070033"],"award-info":[{"award-number":["202201AU070033"]}]},{"name":"Yunnan Province Expert Workstations","award":["202301AU070210"],"award-info":[{"award-number":["202301AU070210"]}]},{"name":"Yunnan Province Expert Workstations","award":["202005AC160007"],"award-info":[{"award-number":["202005AC160007"]}]},{"name":"Yunnan Province Expert Workstations","award":["202301AT070407"],"award-info":[{"award-number":["202301AT070407"]}]},{"name":"Yunnan Province Expert 
Workstations","award":["202302AD080006"],"award-info":[{"award-number":["202302AD080006"]}]},{"name":"Yunnan Province Expert Workstations","award":["202202AD080002"],"award-info":[{"award-number":["202202AD080002"]}]},{"name":"Yunnan Province Expert Workstations","award":["202305AF150078"],"award-info":[{"award-number":["202305AF150078"]}]},{"name":"Yunnan Province Expert Workstations","award":["C619300A020"],"award-info":[{"award-number":["C619300A020"]}]},{"name":"Yunnan Province Expert Workstations","award":["KC-22221218"],"award-info":[{"award-number":["KC-22221218"]}]},{"name":"Yunnan Province Expert Workstations","award":["TM-23236845"],"award-info":[{"award-number":["TM-23236845"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["62101481"],"award-info":[{"award-number":["62101481"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["62261060"],"award-info":[{"award-number":["62261060"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["12202377"],"award-info":[{"award-number":["12202377"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["202201AT070112"],"award-info":[{"award-number":["202201AT070112"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["202301AW070007"],"award-info":[{"award-number":["202301AW070007"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["202201AU070033"],"award-info":[{"award-number":["202201AU070033"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["202301AU070210"],"award-info":[{"award-number":["202301AU070210"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["202005AC160007"],"award-info":[{"award-number":["202005AC160007"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in 
China","award":["202301AT070407"],"award-info":[{"award-number":["202301AT070407"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["202302AD080006"],"award-info":[{"award-number":["202302AD080006"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["202202AD080002"],"award-info":[{"award-number":["202202AD080002"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["202305AF150078"],"award-info":[{"award-number":["202305AF150078"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["C619300A020"],"award-info":[{"award-number":["C619300A020"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["KC-22221218"],"award-info":[{"award-number":["KC-22221218"]}]},{"name":"High-Level Talents Thousand Plan of Yunnan Province in China","award":["TM-23236845"],"award-info":[{"award-number":["TM-23236845"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["62101481"],"award-info":[{"award-number":["62101481"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["62261060"],"award-info":[{"award-number":["62261060"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["12202377"],"award-info":[{"award-number":["12202377"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202201AT070112"],"award-info":[{"award-number":["202201AT070112"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202301AW070007"],"award-info":[{"award-number":["202301AW070007"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202201AU070033"],"award-info":[{"award-number":["202201AU070033"]}]},{"name":"14th Research Innovation Project for 
Postgraduate Students of Yunnan University","award":["202301AU070210"],"award-info":[{"award-number":["202301AU070210"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202005AC160007"],"award-info":[{"award-number":["202005AC160007"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202301AT070407"],"award-info":[{"award-number":["202301AT070407"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202302AD080006"],"award-info":[{"award-number":["202302AD080006"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202202AD080002"],"award-info":[{"award-number":["202202AD080002"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202305AF150078"],"award-info":[{"award-number":["202305AF150078"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["C619300A020"],"award-info":[{"award-number":["C619300A020"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["KC-22221218"],"award-info":[{"award-number":["KC-22221218"]}]},{"name":"14th Research Innovation Project for Postgraduate Students of Yunnan University","award":["TM-23236845"],"award-info":[{"award-number":["TM-23236845"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["62101481"],"award-info":[{"award-number":["62101481"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["62261060"],"award-info":[{"award-number":["62261060"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["12202377"],"award-info":[{"award-number":["12202377"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of 
Yunnan University","award":["202201AT070112"],"award-info":[{"award-number":["202201AT070112"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202301AW070007"],"award-info":[{"award-number":["202301AW070007"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202201AU070033"],"award-info":[{"award-number":["202201AU070033"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202301AU070210"],"award-info":[{"award-number":["202301AU070210"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202005AC160007"],"award-info":[{"award-number":["202005AC160007"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202301AT070407"],"award-info":[{"award-number":["202301AT070407"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202302AD080006"],"award-info":[{"award-number":["202302AD080006"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202202AD080002"],"award-info":[{"award-number":["202202AD080002"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["202305AF150078"],"award-info":[{"award-number":["202305AF150078"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["C619300A020"],"award-info":[{"award-number":["C619300A020"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan University","award":["KC-22221218"],"award-info":[{"award-number":["KC-22221218"]}]},{"name":"15th Research Innovation Project for Postgraduate Students of Yunnan 
University","award":["TM-23236845"],"award-info":[{"award-number":["TM-23236845"]}]}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["IJGI"],"abstract":"<jats:p>Pansharpening is the fusion of panchromatic images and multispectral images to obtain images with high spatial resolution and high spectral resolution, which have a wide range of applications. At present, methods based on deep learning can fit the nonlinear features of images and achieve excellent image quality; however, the images generated with supervised learning approaches lack real-world applicability. Therefore, in this study, we propose an unsupervised pansharpening method based on a generative adversarial network. Considering the fine tubular structures in remote sensing images, a dense connection attention module is designed based on dynamic snake convolution to recover the details of spatial information. In the stage of image fusion, the fusion of features in groups is applied through the cross-scale attention fusion module. Moreover, skip layers are implemented at different scales to integrate significant information, thus improving the objective index values and visual appearance. The loss function contains four constraints, allowing the model to be effectively trained without reference images. 
The experimental results demonstrate that the proposed method outperforms other widely accepted state-of-the-art methods on the QuickBird and WorldView2 data sets.<\/jats:p>","DOI":"10.3390\/ijgi13070222","type":"journal-article","created":{"date-parts":[[2024,6,26]],"date-time":"2024-06-26T05:03:07Z","timestamp":1719378187000},"page":"222","update-policy":"https:\/\/doi.org\/10.3390\/mdpi_crossmark_policy","source":"Crossref","is-referenced-by-count":1,"title":["UPGAN: An Unsupervised Generative Adversarial Network Based on U-Shaped Structure for Pansharpening"],"prefix":"10.3390","volume":"13","author":[{"ORCID":"https:\/\/orcid.org\/0000-0003-2211-2006","authenticated-orcid":false,"given":"Xin","family":"Jin","sequence":"first","affiliation":[{"name":"Engineering Research Center of Cyberspace, Yunnan University, Kunming 650000, China"},{"name":"School of Software, Yunnan University, Kunming 650000, China"}]},{"given":"Yuting","family":"Feng","sequence":"additional","affiliation":[{"name":"Engineering Research Center of Cyberspace, Yunnan University, Kunming 650000, China"},{"name":"School of Software, Yunnan University, Kunming 650000, China"}]},{"given":"Qian","family":"Jiang","sequence":"additional","affiliation":[{"name":"Engineering Research Center of Cyberspace, Yunnan University, Kunming 650000, China"}]},{"given":"Shengfa","family":"Miao","sequence":"additional","affiliation":[{"name":"School of Software, Yunnan University, Kunming 650000, China"}]},{"ORCID":"https:\/\/orcid.org\/0000-0001-8654-7178","authenticated-orcid":false,"given":"Xing","family":"Chu","sequence":"additional","affiliation":[{"name":"Engineering Research Center of Cyberspace, Yunnan University, Kunming 650000, China"},{"name":"School of Software, Yunnan University, Kunming 650000, China"}]},{"given":"Huangqimei","family":"Zheng","sequence":"additional","affiliation":[{"name":"Engineering Research Center of Cyberspace, Yunnan University, Kunming 650000, China"},{"name":"School of 
Software, Yunnan University, Kunming 650000, China"}]},{"given":"Qianqian","family":"Wang","sequence":"additional","affiliation":[{"name":"Engineering Research Center of Cyberspace, Yunnan University, Kunming 650000, China"},{"name":"School of Software, Yunnan University, Kunming 650000, China"}]}],"member":"1968","published-online":{"date-parts":[[2024,6,26]]},"reference":[{"key":"ref_1","doi-asserted-by":"crossref","unstructured":"Guo, H., He, G., Jiang, W., Yin, R., Yan, L., and Leng, W. (2020). A Multi-Scale Water Extraction Convolutional Neural Network (MWEN) Method for GaoFen-1 Remote Sensing Images. ISPRS Int. J. Geo-Inf., 9.","DOI":"10.3390\/ijgi9040189"},{"key":"ref_2","doi-asserted-by":"crossref","unstructured":"Li, S., Lin, Y., and Huang, H. (2024). Relief Supply-Demand Estimation Based on Social Media in Typhoon Disasters Using Deep Learning and a Spatial Information Diffusion Model. ISPRS Int. J. Geo-Inf., 13.","DOI":"10.3390\/ijgi13010029"},{"key":"ref_3","doi-asserted-by":"crossref","unstructured":"Grz\u0105dziel, A. (2022). Application of Remote Sensing Techniques to Identification of Underwater Airplane Wreck in Shallow Water Environment: Case Study of the Baltic Sea, Poland. Remote Sens., 14.","DOI":"10.3390\/rs14205195"},{"key":"ref_4","doi-asserted-by":"crossref","unstructured":"Huang, W., Feng, J., Hua, W., and Sun, L. (2020). A New Architecture of Densely Connected Convolutional Networks for Pan-Sharpening. ISPRS Int. J. Geo-Inf., 9.","DOI":"10.3390\/ijgi9040242"},{"key":"ref_5","doi-asserted-by":"crossref","unstructured":"Weng, L., Xu, Y., Xia, M., Zhang, Y., Liu, J., and Xu, Y. (2020). Water Areas Segmentation from Remote Sensing Images Using a Separable Residual SegNet Network. ISPRS Int. J. Geo-Inf., 9.","DOI":"10.3390\/ijgi9040256"},{"key":"ref_6","doi-asserted-by":"crossref","unstructured":"Tarverdiyev, V., Erer, I., Kaplan, N.H., and Musao\u011flu, N. (2022, January 17\u201322). 
Target Detection in Multispectral Images via Detail Enhanced Pansharpening. Proceedings of the IGARSS 2022\u20142022 IEEE International Geoscience and Remote Sensing Symposium, Kuala Lumpur, Malaysia.","DOI":"10.1109\/IGARSS46834.2022.9884355"},{"key":"ref_7","doi-asserted-by":"crossref","first-page":"036507","DOI":"10.1117\/1.JRS.17.036507","article-title":"Comparative analysis of deep learning-based pansharpening methods for improved image classification accuracy","volume":"17","author":"Yilmaz","year":"2023","journal-title":"J. Appl. Remote Sens."},{"key":"ref_8","unstructured":"Goodfellow, I.J., Pouget-Abadie, J., Mirza, M., Xu, B., Warde-Farley, D., Ozair, S., Courville, A., and Bengio, Y. (2014, January 8\u201313). Generative adversarial nets. Proceedings of the Advances in Neural Information Processing Systems, Montreal, QC, Canada."},{"key":"ref_9","doi-asserted-by":"crossref","unstructured":"Navab, N., Hornegger, J., Wells, W.M., and Frangi, A.F. (2015, January 5\u20139). U-Net: Convolutional Networks for Biomedical Image Segmentation. Proceedings of the Medical Image Computing and Computer-Assisted Intervention\u2014MICCAI 2015, Munich, Germany.","DOI":"10.1007\/978-3-319-24571-3"},{"key":"ref_10","doi-asserted-by":"crossref","first-page":"53","DOI":"10.1109\/MGRS.2020.3019315","article-title":"A New Benchmark Based on Recent Advances in Multispectral Pansharpening: Revisiting Pansharpening with Classical and Emerging Pansharpening Methods","volume":"9","author":"Vivone","year":"2021","journal-title":"IEEE Geosci. Remote Sens. Mag."},{"key":"ref_11","first-page":"459","article-title":"The use of intensity-hue-saturation transformations for merging SPOT panchromatic and multispectral image data","volume":"56","author":"Carper","year":"1990","journal-title":"Photogramm. Eng. 
Remote Sens."},{"key":"ref_12","doi-asserted-by":"crossref","first-page":"309","DOI":"10.1109\/LGRS.2004.834804","article-title":"A fast intensity-hue-saturation fusion technique with spectral adjustment for IKONOS imagery","volume":"1","author":"Tu","year":"2004","journal-title":"IEEE Geosci. Remote Sens. Lett."},{"key":"ref_13","doi-asserted-by":"crossref","first-page":"177","DOI":"10.1016\/j.inffus.2010.09.003","article-title":"Fusion of multispectral and panchromatic images based on support value transform and adaptive principal component analysis","volume":"13","author":"Yang","year":"2012","journal-title":"Inf. Fusion"},{"key":"ref_14","doi-asserted-by":"crossref","first-page":"1927","DOI":"10.1109\/18.857802","article-title":"An information-theoretic approach to spectral variability, similarity, and discrimination for hyperspectral image analysis","volume":"46","author":"Chang","year":"2002","journal-title":"IEEE Trans. Inf. Theory"},{"key":"ref_15","doi-asserted-by":"crossref","first-page":"3461","DOI":"10.1080\/014311600750037499","article-title":"Smoothing filter-based intensity modulation: A spectral preserve image fusion technique for improving spatial details","volume":"21","author":"Jianguo","year":"2000","journal-title":"Int. J. Remote Sens."},{"key":"ref_16","doi-asserted-by":"crossref","first-page":"295","DOI":"10.1109\/TGRS.2010.2051674","article-title":"A New Adaptive Component-Substitution-Based Satellite Image Fusion by Using Partial Replacement","volume":"49","author":"Choi","year":"2011","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_17","doi-asserted-by":"crossref","first-page":"3230","DOI":"10.1109\/TGRS.2007.901007","article-title":"Improving Component Substitution Pansharpening Through Multivariate Regression of MS +Pan Data","volume":"45","author":"Aiazzi","year":"2007","journal-title":"IEEE Trans. Geosci. 
Remote Sens."},{"key":"ref_18","doi-asserted-by":"crossref","unstructured":"N\u00fa\u00f1ez, J., Otazu, X., Fors, O., Prades, A., Pal\u00e1, V., and Arbiol, R. (1998, January 4). Data fusion of SPOT and LANDSAT images using additive multiresolution wavelet decomposition. Proceedings of the Image and Signal Processing for Remote Sensing IV, Barcelona, Spain.","DOI":"10.1117\/12.331865"},{"key":"ref_19","doi-asserted-by":"crossref","first-page":"17","DOI":"10.1016\/S1566-2535(01)00037-9","article-title":"Using the discrete wavelet frame transform to merge Landsat TM and SPOT panchromatic images","volume":"3","author":"Li","year":"2002","journal-title":"Inf. Fusion"},{"key":"ref_20","unstructured":"Aiazzi, B., Alparone, L., Baronti, S., Garzelli, A., and Selva, M. (2003, January 22\u201323). An MTF-based spectral distortion minimizing model for pan-sharpening of very high resolution multispectral images of urban areas. Proceedings of the 2003 2nd GRSS\/ISPRS Joint Workshop on Remote Sensing and Data Fusion over Urban Areas, Berlin, Germany."},{"key":"ref_21","doi-asserted-by":"crossref","first-page":"1323","DOI":"10.1109\/TGRS.2008.916211","article-title":"An Efficient Pan-Sharpening Method via a Combined Adaptive PCA Approach and Contourlets","volume":"46","author":"Shah","year":"2008","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_22","doi-asserted-by":"crossref","first-page":"591","DOI":"10.14358\/PERS.72.5.591","article-title":"MTF-tailored multiscale fusion of high-resolution MS and pan imagery","volume":"72","author":"Aiazzi","year":"2006","journal-title":"Photogramm. Eng. Remote. Sens."},{"key":"ref_23","doi-asserted-by":"crossref","first-page":"2563","DOI":"10.1109\/TGRS.2015.2503045","article-title":"Spatial Methods for Multispectral Pansharpening: Multiresolution Analysis Demystified","volume":"54","author":"Alparone","year":"2016","journal-title":"IEEE Trans. Geosci. 
Remote Sens."},{"key":"ref_24","doi-asserted-by":"crossref","first-page":"43","DOI":"10.1007\/s11263-006-6852-x","article-title":"A Variational Model for P+XS Image Fusion","volume":"69","author":"Ballester","year":"2006","journal-title":"Int. J. Comput. Vis."},{"key":"ref_25","doi-asserted-by":"crossref","first-page":"4160","DOI":"10.1109\/TIP.2014.2333661","article-title":"A new pansharpening method based on spatial and spectral sparsity priors","volume":"23","author":"He","year":"2014","journal-title":"IEEE Trans. Image Process."},{"key":"ref_26","doi-asserted-by":"crossref","first-page":"3453","DOI":"10.1109\/TGRS.2012.2184122","article-title":"A Bayesian Restoration Approach for Hyperspectral Images","volume":"50","author":"Zhang","year":"2012","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_27","doi-asserted-by":"crossref","first-page":"1847","DOI":"10.1109\/TGRS.2008.917131","article-title":"Bayesian Data Fusion for Adaptable Image Pansharpening","volume":"46","author":"Fasbender","year":"2008","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_28","doi-asserted-by":"crossref","first-page":"738","DOI":"10.1109\/TGRS.2010.2067219","article-title":"A New Pan-Sharpening Method Using a Compressed Sensing Technique","volume":"49","author":"Li","year":"2011","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_29","doi-asserted-by":"crossref","first-page":"180","DOI":"10.1109\/LGRS.2014.2331291","article-title":"A Pansharpening Method Based on the Sparse Representation of Injected Details","volume":"12","author":"Vicinanza","year":"2015","journal-title":"IEEE Geosci. Remote Sens. Lett."},{"key":"ref_30","doi-asserted-by":"crossref","first-page":"2954","DOI":"10.1109\/TIV.2022.3218833","article-title":"MUGAN: Thermal Infrared Image Colorization Using Mixed-Skipping UNet and Generative Adversarial Network","volume":"8","author":"Liao","year":"2023","journal-title":"IEEE Trans. Intell. 
Veh."},{"key":"ref_31","doi-asserted-by":"crossref","first-page":"5407915","DOI":"10.1109\/TGRS.2022.3154435","article-title":"A Deep Multitask Convolutional Neural Network for Remote Sensing Image Super-Resolution and Colorization","volume":"60","author":"Feng","year":"2022","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_32","doi-asserted-by":"crossref","first-page":"1037","DOI":"10.1109\/LGRS.2014.2376034","article-title":"A New Pan-Sharpening Method with Deep Neural Networks","volume":"12","author":"Huang","year":"2015","journal-title":"IEEE Geosci. Remote Sens. Lett."},{"key":"ref_33","first-page":"691","article-title":"Fusion of satellite images of different spatial resolutions: Assessing the quality of resulting images","volume":"63","author":"Lucien","year":"1997","journal-title":"Photogramm. Eng. Remote Sens."},{"key":"ref_34","doi-asserted-by":"crossref","unstructured":"Masi, G., Cozzolino, D., Verdoliva, L., and Scarpa, G. (2016). Pansharpening by Convolutional Neural Networks. Remote Sens., 8.","DOI":"10.3390\/rs8070594"},{"key":"ref_35","doi-asserted-by":"crossref","first-page":"5443","DOI":"10.1109\/TGRS.2018.2817393","article-title":"Target-Adaptive CNN-Based Pansharpening","volume":"56","author":"Scarpa","year":"2018","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_36","doi-asserted-by":"crossref","unstructured":"Yang, J., Fu, X., Hu, Y., Huang, Y., Ding, X., and Paisley, J. (2017, January 22\u201329). PanNet: A Deep Network Architecture for Pan-Sharpening. Proceedings of the IEEE International Conference on Computer Vision, Venice, Italy.","DOI":"10.1109\/ICCV.2017.193"},{"key":"ref_37","doi-asserted-by":"crossref","first-page":"292","DOI":"10.1016\/j.inffus.2022.08.018","article-title":"Supervised-unsupervised combined deep convolutional neural networks for high-fidelity pansharpening","volume":"89","author":"Liu","year":"2023","journal-title":"Inf. 
Fusion"},{"key":"ref_38","doi-asserted-by":"crossref","first-page":"5403517","DOI":"10.1109\/TGRS.2023.3281602","article-title":"A Unified Two-Stage Spatial and Spectral Network with Few-Shot Learning for Pansharpening","volume":"61","author":"Sheng","year":"2023","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_39","doi-asserted-by":"crossref","first-page":"4295","DOI":"10.1109\/JSTARS.2020.3008047","article-title":"Pansharpening via Unsupervised Convolutional Neural Networks","volume":"13","author":"Luo","year":"2020","journal-title":"IEEE J. Sel. Top. Appl. Earth Obs. Remote Sens."},{"key":"ref_40","doi-asserted-by":"crossref","first-page":"3192","DOI":"10.1109\/TGRS.2020.3009207","article-title":"Unsupervised Pansharpening Based on Self-Attention Mechanism","volume":"59","author":"Qu","year":"2021","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_41","doi-asserted-by":"crossref","first-page":"5468","DOI":"10.1109\/JSTARS.2022.3188181","article-title":"LDP-Net: An Unsupervised Pansharpening Network Based on Learnable Degradation Processes","volume":"15","author":"Ni","year":"2022","journal-title":"IEEE J. Sel. Top. Appl. Earth Obs. Remote Sens."},{"key":"ref_42","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1109\/TGRS.2023.3299356","article-title":"Unsupervised Deep Learning-Based Pansharpening with Jointly Enhanced Spectral and Spatial Fidelity","volume":"61","author":"Ciotola","year":"2023","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_43","doi-asserted-by":"crossref","unstructured":"Ciotola, M., and Scarpa, G. (2023). Fast Full-Resolution Target-Adaptive CNN-Based Pansharpening Framework. Remote Sens., 15.","DOI":"10.3390\/rs15020319"},{"key":"ref_44","doi-asserted-by":"crossref","first-page":"102001","DOI":"10.1016\/j.inffus.2023.102001","article-title":"Zero-shot semi-supervised learning for pansharpening","volume":"101","author":"Cao","year":"2024","journal-title":"Inf. 
Fusion"},{"key":"ref_45","doi-asserted-by":"crossref","first-page":"5412613","DOI":"10.1109\/TGRS.2022.3215902","article-title":"Deep SURE for Unsupervised Remote Sensing Image Fusion","volume":"60","author":"Nguyen","year":"2022","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_46","first-page":"5513505","article-title":"MetaPan: Unsupervised Adaptation with Meta-Learning for Multispectral Pansharpening","volume":"19","author":"Wang","year":"2022","journal-title":"IEEE Geosci. Remote Sens. Lett."},{"key":"ref_47","doi-asserted-by":"crossref","first-page":"110","DOI":"10.1016\/j.inffus.2020.04.006","article-title":"Pan-GAN: An unsupervised pan-sharpening method for remote sensing image fusion","volume":"62","author":"Ma","year":"2020","journal-title":"Inf. Fusion"},{"key":"ref_48","doi-asserted-by":"crossref","first-page":"4401611","DOI":"10.1109\/TGRS.2021.3060958","article-title":"Generative Adversarial Network for Pansharpening with Spectral and Spatial Discriminators","volume":"60","author":"Gastineau","year":"2022","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_49","doi-asserted-by":"crossref","unstructured":"Zhu, J.Y., Park, T., Isola, P., and Efros, A.A. (2017, January 22\u201329). Unpaired Image-to-Image Translation Using Cycle-Consistent Adversarial Networks. Proceedings of the IEEE International Conference on Computer Vision (ICCV), Venice, Italy.","DOI":"10.1109\/ICCV.2017.244"},{"key":"ref_50","first-page":"5511805","article-title":"Self-Supervised Pansharpening Based on a Cycle-Consistent Generative Adversarial Network","volume":"19","author":"Li","year":"2022","journal-title":"IEEE Geosci. Remote Sens. Lett."},{"key":"ref_51","first-page":"5408814","article-title":"Unsupervised Cycle-Consistent Generative Adversarial Networks for Pan Sharpening","volume":"60","author":"Zhou","year":"2022","journal-title":"IEEE Trans. Geosci. 
Remote Sens."},{"key":"ref_52","doi-asserted-by":"crossref","first-page":"8195","DOI":"10.1109\/TNNLS.2021.3137373","article-title":"ZeRGAN: Zero-Reference GAN for Fusion of Multispectral and Panchromatic Images","volume":"34","author":"Diao","year":"2023","journal-title":"IEEE Trans. Neural Netw. Learn. Syst."},{"key":"ref_53","doi-asserted-by":"crossref","first-page":"3486","DOI":"10.1109\/TGRS.2020.3010441","article-title":"Rethinking CNN-Based Pansharpening: Guided Colorization of Panchromatic Images via GANs","volume":"59","author":"Ozcelik","year":"2021","journal-title":"IEEE Trans. Geosci. Remote Sens."},{"key":"ref_54","doi-asserted-by":"crossref","first-page":"3279","DOI":"10.1109\/TKDE.2021.3126456","article-title":"A General Survey on Attention Mechanisms in Deep Learning","volume":"35","author":"Brauwers","year":"2023","journal-title":"IEEE Trans. Knowl. Data Eng."},{"key":"ref_55","unstructured":"Mnih, V., Welling, M., Cortes, C., Lawrence, N., and Weinberger, K. (2014, January 8\u201313). Recurrent Models of Visual Attention. Proceedings of the 28th Conference on Neural Information Processing Systems (NIPS), Montreal, QC, Canada."},{"key":"ref_56","first-page":"2017","article-title":"Spatial transformer networks","volume":"28","author":"Jaderberg","year":"2015","journal-title":"Adv. Neural Inf. Process. Syst."},{"key":"ref_57","doi-asserted-by":"crossref","unstructured":"Hu, J., Shen, L., and Sun, G. (2018, January 18\u201323). Squeeze-and-Excitation Networks. Proceedings of the 2018 IEEE\/CVF Conference on Computer Vision and Pattern Recognition, Salt Lake City, UT, USA.","DOI":"10.1109\/CVPR.2018.00745"},{"key":"ref_58","doi-asserted-by":"crossref","unstructured":"Gao, Z., Xie, J., Wang, Q., and Li, P. (2019, January 15\u201320). Global Second-Order Pooling Convolutional Networks. 
Proceedings of the 2019 IEEE\/CVF Conference on Computer Vision and Pattern Recognition (CVPR), Long Beach, CA, USA.","DOI":"10.1109\/CVPR.2019.00314"},{"key":"ref_59","doi-asserted-by":"crossref","unstructured":"Lee, H., Kim, H.E., and Nam, H. (November, January 27). SRM: A Style-Based Recalibration Module for Convolutional Neural Networks. Proceedings of the 2019 IEEE\/CVF International Conference on Computer Vision (ICCV), Seoul, Republic of Korea.","DOI":"10.1109\/ICCV.2019.00194"},{"key":"ref_60","doi-asserted-by":"crossref","unstructured":"Woo, S., Park, J., Lee, J.Y., and Kweon, I.S. (2018, January 8\u201314). CBAM: Convolutional Block Attention Module. Proceedings of the Computer Vision\u2014ECCV 2018, Munich, Germany.","DOI":"10.1007\/978-3-030-01234-2_1"},{"key":"ref_61","unstructured":"Park, J., Woo, S., Lee, J.Y., and Kweon, I.S. (2018). BAM: Bottleneck Attention Module. arXiv."},{"key":"ref_62","doi-asserted-by":"crossref","first-page":"87","DOI":"10.1109\/TPAMI.2022.3152247","article-title":"A Survey on Vision Transformer","volume":"45","author":"Han","year":"2023","journal-title":"IEEE Trans. Pattern Anal. Mach. Intell."},{"key":"ref_63","doi-asserted-by":"crossref","unstructured":"Liu, Z., Lin, Y., Cao, Y., Hu, H., Wei, Y., Zhang, Z., Lin, S., and Guo, B. (2021, January 11\u201317). Swin Transformer: Hierarchical Vision Transformer using Shifted Windows. Proceedings of the 2021 IEEE\/CVF International Conference on Computer Vision (ICCV), Montreal, BC, Canada.","DOI":"10.1109\/ICCV48922.2021.00986"},{"key":"ref_64","doi-asserted-by":"crossref","unstructured":"Qi, Y., He, Y., Qi, X., Zhang, Y., and Yang, G. (2023, January 2\u20136). Dynamic Snake Convolution based on Topological Geometric Constraints for Tubular Structure Segmentation. 
Proceedings of the 2023 IEEE\/CVF International Conference on Computer Vision (ICCV), Paris, France.","DOI":"10.1109\/ICCV51070.2023.00558"},{"key":"ref_65","doi-asserted-by":"crossref","first-page":"193","DOI":"10.14358\/PERS.74.2.193","article-title":"Multispectral and Panchromatic Data Fusion Assessment without Reference","volume":"74","author":"Alparone","year":"2008","journal-title":"Photogramm. Eng. Remote Sens."},{"key":"ref_66","doi-asserted-by":"crossref","first-page":"600","DOI":"10.1109\/TIP.2003.819861","article-title":"Image quality assessment: From error visibility to structural similarity","volume":"13","author":"Wang","year":"2004","journal-title":"IEEE Trans. Image Process."},{"key":"ref_67","doi-asserted-by":"crossref","first-page":"140","DOI":"10.1007\/s11220-016-0135-6","article-title":"Remote Sensing Image Fusion with Convolutional Neural Network","volume":"17","author":"Zhong","year":"2016","journal-title":"Sens. Imaging"},{"key":"ref_68","unstructured":"Yuhas, R.H., Goetz, A.F.H., and Boardman, J.W. (1992, January 1\u20135). Discrimination among semi-arid landscape endmembers using the Spectral Angle Mapper (SAM) algorithm. Proceedings of the JPL, Summaries of the Third Annual JPL Airborne Geoscience Workshop, Pasadena, CA, USA."},{"key":"ref_69","unstructured":"Wald, L. (2000, January 26\u201328). Quality of high resolution synthesised images: Is there a simple criterion ?. Proceedings of the Third Conference \u201cFusion of Earth Data: Merging Point Measurements, Raster Maps and Remotely Sensed Images\u201d, Sophia Antipolis, France."},{"key":"ref_70","doi-asserted-by":"crossref","first-page":"81","DOI":"10.1109\/97.995823","article-title":"A universal image quality index","volume":"9","author":"Wang","year":"2002","journal-title":"IEEE Signal Process. 
Lett."},{"key":"ref_71","doi-asserted-by":"crossref","first-page":"168","DOI":"10.1109\/MGRS.2022.3170092","article-title":"Full-Resolution Quality Assessment of Pansharpening: Theoretical and hands-on approaches","volume":"10","author":"Arienzo","year":"2022","journal-title":"IEEE Geosci. Remote Sens. Mag."},{"key":"ref_72","doi-asserted-by":"crossref","unstructured":"Scarpa, G., and Ciotola, M. (2022). Full-Resolution Quality Assessment for Pansharpening. Remote Sens., 14.","DOI":"10.3390\/rs14081808"},{"key":"ref_73","doi-asserted-by":"crossref","first-page":"662","DOI":"10.1109\/LGRS.2009.2022650","article-title":"Hypercomplex Quality Assessment of Multi\/Hyperspectral Images","volume":"6","author":"Garzelli","year":"2009","journal-title":"IEEE Geosci. Remote Sens. Lett."}],"container-title":["ISPRS International Journal of Geo-Information"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/www.mdpi.com\/2220-9964\/13\/7\/222\/pdf","content-type":"unspecified","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,10,10]],"date-time":"2025-10-10T15:04:39Z","timestamp":1760108679000},"score":1,"resource":{"primary":{"URL":"https:\/\/www.mdpi.com\/2220-9964\/13\/7\/222"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2024,6,26]]},"references-count":73,"journal-issue":{"issue":"7","published-online":{"date-parts":[[2024,7]]}},"alternative-id":["ijgi13070222"],"URL":"https:\/\/doi.org\/10.3390\/ijgi13070222","relation":{},"ISSN":["2220-9964"],"issn-type":[{"value":"2220-9964","type":"electronic"}],"subject":[],"published":{"date-parts":[[2024,6,26]]}}}