@article{3265,
  keywords = {Attention Model, Convolutional Neural Network (CNN), Target Recognition, Synthetic Aperture Radar},
  author   = {Ukwuoma, Chiagoziem C. and Qin, Zhiguang and Tienin, Bole W. and Yussif, Sophyani B. and Ejiyi, Chukwuebuka J. and Urama, Gilbert C. and Ukwuoma, Chibueze D. and Chikwendu, Ijeoma A.},
  title    = {Synthetic Aperture Radar Automatic Target Recognition Based on a Simple Attention Mechanism},
  abstract = {A simple but effective channel attention module is proposed for Synthetic Aperture Radar (SAR) Automatic Target Recognition (ATR). The channel attention technique has shown recent success in improving Deep Convolutional Neural Networks (CNN). The resolution of SAR images does not surpass optical images thus information flow of SAR images becomes relatively poor when the network depth is raised blindly leading to a serious gradients explosion/vanishing. To resolve the issue of SAR image recognition efficiency and ambiguity trade-off, we proposed a simple Channel Attention module into the ResNet Architecture as our network backbone, which utilizes few parameters yet results in a performance gain. Our simple attention module, which follows the implementation of Efficient Channel Attention, shows that avoiding dimensionality reduction is essential for learning as well as an appropriate cross-channel interaction can preserve performance and decrease model complexity. We also explored the One Policy Learning Rate on the ResNet-50 architecture and compared it with the proposed attention based ResNet-50 architecture. A thorough analysis of the MSTAR Dataset demonstrates the efficacy of the suggested strategy over the most recent findings. With the Attention-based model and the One Policy Learning Rate-based architecture, we were able to obtain recognition rate of 100\% and 99.8\%, respectively.},
  journal  = {International Journal of Interactive Multimedia and Artificial Intelligence},
  year     = {2023},
  month    = dec,
  volume   = {8},
  number   = {4},
  pages    = {67--77},
  issn     = {1989-1660},
  doi      = {10.9781/ijimai.2023.02.004},
  url      = {https://www.ijimai.org/journal/sites/default/files/2023-11/ijimai8_4_6.pdf},
}