{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,3,7]],"date-time":"2026-03-07T17:57:43Z","timestamp":1772906263113,"version":"3.50.1"},"reference-count":30,"publisher":"Springer Science and Business Media LLC","issue":"5","license":[{"start":{"date-parts":[[2025,3,26]],"date-time":"2025-03-26T00:00:00Z","timestamp":1742947200000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"},{"start":{"date-parts":[[2025,3,26]],"date-time":"2025-03-26T00:00:00Z","timestamp":1742947200000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springernature.com\/gp\/researchers\/text-and-data-mining"}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["J Supercomput"],"DOI":"10.1007\/s11227-025-07164-3","type":"journal-article","created":{"date-parts":[[2025,3,29]],"date-time":"2025-03-29T05:14:04Z","timestamp":1743225244000},"update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":2,"title":["Generating a large family of nonlinear activation functions (LFNAFs) in neural networks"],"prefix":"10.1007","volume":"81","author":[{"given":"Morteza","family":"Taheri","sequence":"first","affiliation":[]},{"given":"Sajad Haghzad","family":"Klidbary","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2025,3,26]]},"reference":[{"issue":"6","key":"7164_CR1","doi-asserted-by":"publisher","first-page":"84","DOI":"10.1145\/3065386","volume":"60","author":"A Krizhevsky","year":"2017","unstructured":"Krizhevsky A, Sutskever I, Hinton GE (2017) ImageNet classification with deep convolutional neural networks. Commun ACM 60(6):84\u201390","journal-title":"Commun ACM"},{"key":"7164_CR2","doi-asserted-by":"publisher","first-page":"490","DOI":"10.1016\/j.neucom.2021.06.067","volume":"458","author":"H Zhu","year":"2021","unstructured":"Zhu H et al (2021) Logish: a new nonlinear nonmonotonic activation function for convolutional neural networks. Neurocomputing 458:490\u2013499","journal-title":"Neurocomputing"},{"key":"7164_CR3","doi-asserted-by":"publisher","first-page":"631","DOI":"10.1007\/s13370-020-00850-w","volume":"32","author":"B Frasin","year":"2021","unstructured":"Frasin B, Swamy S, Nirmala J (2021) Some special families of holomorphic and Al-Oboudi type bi-univalent functions related to k-Fibonacci numbers involving modified Sigmoid activation function. Afr Mat 32:631\u2013643","journal-title":"Afr Mat"},{"key":"7164_CR4","doi-asserted-by":"publisher","DOI":"10.1016\/j.eswa.2023.119503","volume":"217","author":"S Kili\u00e7arslan","year":"2023","unstructured":"Kili\u00e7arslan S et al (2023) Detection and classification of pneumonia using novel Superior Exponential (SupEx) activation function in convolutional neural networks. Expert Syst Appl 217:119503","journal-title":"Expert Syst Appl"},{"issue":"24","key":"7164_CR5","doi-asserted-by":"publisher","first-page":"21729","DOI":"10.1007\/s00521-022-07625-3","volume":"34","author":"K Adem","year":"2022","unstructured":"Adem K (2022) P+ FELU: flexible and trainable fast exponential linear unit for deep learning architectures. Neural Comput Appl 34(24):21729\u201321740","journal-title":"Neural Comput Appl"},{"key":"7164_CR6","doi-asserted-by":"publisher","DOI":"10.1016\/j.eswa.2021.114805","volume":"174","author":"S Kili\u00e7arslan","year":"2021","unstructured":"Kili\u00e7arslan S, Celik M (2021) RSigELU: a nonlinear activation function for deep neural networks. Expert Syst Appl 174:114805","journal-title":"Expert Syst Appl"},{"issue":"3","key":"7164_CR7","doi-asserted-by":"publisher","first-page":"1478","DOI":"10.1109\/TNNLS.2021.3105444","volume":"34","author":"VM Vargas","year":"2021","unstructured":"Vargas VM et al (2021) Activation functions for convolutional neural networks: proposals and experimental study. IEEE Trans Neural Netw Learn Syst 34(3):1478\u20131488","journal-title":"IEEE Trans Neural Netw Learn Syst"},{"key":"7164_CR8","unstructured":"Ramachandran P, Zoph B, Le QV (2017) Searching for activation functions. arXiv preprint arXiv:1710.05941"},{"key":"7164_CR9","doi-asserted-by":"publisher","first-page":"73","DOI":"10.1016\/j.neucom.2021.02.030","volume":"442","author":"A Wuraola","year":"2021","unstructured":"Wuraola A, Patel N, Nguang SK (2021) Efficient activation functions for embedded inference engines. Neurocomputing 442:73\u201388","journal-title":"Neurocomputing"},{"key":"7164_CR10","doi-asserted-by":"publisher","DOI":"10.1016\/j.eswa.2021.115892","volume":"187","author":"L Parisi","year":"2022","unstructured":"Parisi L et al (2022) Quantum ReLU activation for convolutional neural networks to improve diagnosis of Parkinson\u2019s disease and COVID-19. Expert Syst Appl 187:115892","journal-title":"Expert Syst Appl"},{"issue":"4","key":"7164_CR11","doi-asserted-by":"publisher","first-page":"2637","DOI":"10.1007\/s12065-024-00908-9","volume":"17","author":"H Kalim","year":"2024","unstructured":"Kalim H, Chug A, Singh AP (2024) modSwish: a new activation function for neural networks. Evol Intell 17(4):2637\u20132647","journal-title":"Evol Intell"},{"issue":"4","key":"7164_CR12","doi-asserted-by":"publisher","first-page":"6345","DOI":"10.1007\/s11042-022-14313-w","volume":"82","author":"S Kili\u00e7arslan","year":"2023","unstructured":"Kili\u00e7arslan S (2023) A novel nonlinear hybrid HardSReLUE activation function in transfer learning architectures for hemorrhage classification. Multimedia Tools Appl 82(4):6345\u20136365","journal-title":"Multimedia Tools Appl"},{"key":"7164_CR13","unstructured":"Gulcehre C et al (2016) Noisy activation functions. In: International conference on machine learning. PMLR"},{"issue":"4","key":"7164_CR14","doi-asserted-by":"publisher","first-page":"540","DOI":"10.3390\/electronics11040540","volume":"11","author":"X Wang","year":"2022","unstructured":"Wang X, Ren H, Wang A (2022) Smish: a novel activation function for deep learning methods. Electronics 11(4):540","journal-title":"Electronics"},{"key":"7164_CR15","doi-asserted-by":"publisher","first-page":"99","DOI":"10.1016\/S0092-8240(05)80006-0","volume":"52","author":"WS McCulloch","year":"1990","unstructured":"McCulloch WS, Pitts W (1990) A logical calculus of the ideas immanent in nervous activity. Bull Math Biol 52:99\u2013115","journal-title":"Bull Math Biol"},{"issue":"7553","key":"7164_CR16","doi-asserted-by":"publisher","first-page":"436","DOI":"10.1038\/nature14539","volume":"521","author":"Y LeCun","year":"2015","unstructured":"LeCun Y, Bengio Y, Hinton G (2015) Deep learning. Nature 521(7553):436\u2013444","journal-title":"Nature"},{"key":"7164_CR17","doi-asserted-by":"publisher","first-page":"168626","DOI":"10.1109\/ACCESS.2024.3474574","volume":"12","author":"LS Pusztah\u00e1zi","year":"2024","unstructured":"Pusztah\u00e1zi LS, Eigner G, Csisz\u00e1r O (2024) Parametric activation functions for neural networks: a tutorial survey. IEEE Access 12:168626\u2013168644","journal-title":"IEEE Access"},{"key":"7164_CR18","doi-asserted-by":"publisher","first-page":"47794","DOI":"10.1109\/ACCESS.2023.3276298","volume":"11","author":"H Abdel-Nabi","year":"2023","unstructured":"Abdel-Nabi H et al (2023) HcLSH: a novel non-linear monotonic activation function for deep learning methods. IEEE Access 11:47794\u201347815","journal-title":"IEEE Access"},{"key":"7164_CR19","unstructured":"Tiwari S (2020) Activation functions in neural networks. geeksforgeeks.org"},{"issue":"13","key":"7164_CR20","doi-asserted-by":"publisher","first-page":"7595","DOI":"10.1007\/s00521-024-09538-9","volume":"36","author":"S Kili\u00e7arslan","year":"2024","unstructured":"Kili\u00e7arslan S, Celik M (2024) Parametric RSigELU: a new trainable activation function for deep learning. Neural Comput Appl 36(13):7595\u20137607","journal-title":"Neural Comput Appl"},{"key":"7164_CR21","unstructured":"Misra D (2019) Mish: a self regularized non-monotonic activation function. arXiv preprint arXiv:1908.08681"},{"key":"7164_CR22","doi-asserted-by":"publisher","first-page":"48","DOI":"10.1016\/j.neunet.2022.01.001","volume":"148","author":"G Bingham","year":"2022","unstructured":"Bingham G, Miikkulainen R (2022) Discovering parametric activation functions. Neural Netw 148:48\u201365","journal-title":"Neural Netw"},{"issue":"2","key":"7164_CR23","doi-asserted-by":"publisher","first-page":"136","DOI":"10.1049\/cvi2.12020","volume":"15","author":"X Liu","year":"2021","unstructured":"Liu X, Di X (2021) TanhExp: a smooth activation function with high convergence speed for lightweight neural networks. IET Comput Vis 15(2):136\u2013150","journal-title":"IET Comput Vis"},{"key":"7164_CR24","volume-title":"Early TraNSCENDENTalS","author":"GB Thomas Jr","year":"2014","unstructured":"Thomas GB Jr et al (2014) Early TraNSCENDENTalS. WH Freeman, San Francisco"},{"key":"7164_CR25","volume":"577","author":"T Jia","year":"2024","unstructured":"Jia T, Jiang R, Fu Z, Xie Z, Ding X, Wang Z (2024) Optimization and inverse design of optical activation functions based on neural networks. Opt Commun 577:123200","journal-title":"Opt Commun"},{"key":"7164_CR26","first-page":"1","volume":"25","author":"S Zhang","year":"2024","unstructured":"Zhang S, Lu J, Zhao H (2024) Deep network approximation: beyond ReLU to diverse activation functions. J Mach Learn Res 25:1\u201339","journal-title":"J Mach Learn Res"},{"key":"7164_CR27","doi-asserted-by":"publisher","first-page":"292","DOI":"10.1007\/s11227-023-05441-7","volume":"80","author":"RHK Emanuel","year":"2024","unstructured":"Emanuel RHK, Docherty PD, Lunt H, M\u00f6ller K (2024) The effect of activation functions on accuracy, convergence speed, and misclassification confidence in CNN text classification: a comprehensive exploration. J Supercomput 80:292\u2013312","journal-title":"J Supercomput"},{"key":"7164_CR28","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1007\/springer-nature.2023.1234567","volume":"2023","author":"M Kaytan","year":"2023","unstructured":"Kaytan M, Aydilek \u0130B, Yeroglu C (2023) Gish: a novel activation function for image classification. Springer Nature 2023:1\u201310. https:\/\/doi.org\/10.1007\/springer-nature.2023.1234567","journal-title":"Springer Nature"},{"key":"7164_CR29","doi-asserted-by":"publisher","first-page":"142537","DOI":"10.1109\/ACCESS.2023.3315308","volume":"11","author":"H-S Feng","year":"2023","unstructured":"Feng H-S, Yang C-H (2023) PolyLU: a simple and robust polynomial-based linear unit activation function for deep learning. IEEE Access 11:142537\u2013142548. https:\/\/doi.org\/10.1109\/ACCESS.2023.3315308","journal-title":"IEEE Access"},{"key":"7164_CR30","volume":"18","author":"B Singh","year":"2023","unstructured":"Singh B, Patel S, Vijayvargiya A, Kumar R (2023) Analyzing the impact of activation functions on the performance of the data-driven gait model. Res Eng 18:101029","journal-title":"Res Eng"}],"container-title":["The Journal of Supercomputing"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11227-025-07164-3.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s11227-025-07164-3\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11227-025-07164-3.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2025,3,29]],"date-time":"2025-03-29T05:14:22Z","timestamp":1743225262000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s11227-025-07164-3"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2025,3,26]]},"references-count":30,"journal-issue":{"issue":"5","published-online":{"date-parts":[[2025,4]]}},"alternative-id":["7164"],"URL":"https:\/\/doi.org\/10.1007\/s11227-025-07164-3","relation":{},"ISSN":["1573-0484"],"issn-type":[{"value":"1573-0484","type":"electronic"}],"subject":[],"published":{"date-parts":[[2025,3,26]]},"assertion":[{"value":"6 March 2025","order":1,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"26 March 2025","order":2,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Declarations"}},{"value":"The authors declare no competing interests.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Conflict of interest"}},{"value":"This manuscript has not been published nor is it currently under consideration for publication elsewhere.","order":3,"name":"Ethics","group":{"name":"EthicsHeading","label":"Ethical responsibilities"}}],"article-number":"661"}}