{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2026,5,1]],"date-time":"2026-05-01T21:00:10Z","timestamp":1777669210721,"version":"3.51.4"},"reference-count":28,"publisher":"Springer Science and Business Media LLC","issue":"2","license":[{"start":{"date-parts":[[2021,12,1]],"date-time":"2021-12-01T00:00:00Z","timestamp":1638316800000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.springer.com\/tdm"},{"start":{"date-parts":[[2021,12,1]],"date-time":"2021-12-01T00:00:00Z","timestamp":1638316800000},"content-version":"vor","delay-in-days":0,"URL":"https:\/\/www.springer.com\/tdm"}],"funder":[{"DOI":"10.13039\/501100004488","name":"hrvatska zaklada za znanost","doi-asserted-by":"publisher","award":["UIP-2017-05-4042"],"award-info":[{"award-number":["UIP-2017-05-4042"]}],"id":[{"id":"10.13039\/501100004488","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":["link.springer.com"],"crossmark-restriction":false},"short-container-title":["Intel Serv Robotics"],"published-print":{"date-parts":[[2022,4]]},"DOI":"10.1007\/s11370-021-00401-7","type":"journal-article","created":{"date-parts":[[2021,12,1]],"date-time":"2021-12-01T20:02:37Z","timestamp":1638388957000},"page":"193-201","update-policy":"https:\/\/doi.org\/10.1007\/springer_crossmark_policy","source":"Crossref","is-referenced-by-count":18,"title":["Pepper to fall: a perception method for sweet pepper robotic harvesting"],"prefix":"10.1007","volume":"15","author":[{"ORCID":"https:\/\/orcid.org\/0000-0003-0944-0160","authenticated-orcid":false,"given":"Marsela","family":"Polic","sequence":"first","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0003-0655-4023","authenticated-orcid":false,"given":"Jelena","family":"Tabak","sequence":"additional","affiliation":[]},{"ORCID":"https:\/\/orcid.org\/0000-0002-9483-9282","authenticated-orcid":false,"given":"Matko","family":"Orsag","sequence":"additional","affiliation":[]}],"member":"297","published-online":{"date-parts":[[2021,12,1]]},"reference":[{"key":"401_CR1","unstructured":"Orsag M et al (2021) Specularia. https:\/\/sites.google.com\/view\/specularia-pepper-picking, Accessed: 2021-03-31"},{"issue":"6","key":"401_CR2","doi-asserted-by":"publisher","first-page":"888","DOI":"10.1002\/rob.21525","volume":"31","author":"CW Bac","year":"2014","unstructured":"Bac CW, van Henten EJ, Hemming J, Edan Y (2014) Harvesting robots for high-value crops: state-of-the-art review and challenges ahead. J Field Robot 31(6):888\u2013911","journal-title":"J Field Robot"},{"key":"401_CR3","doi-asserted-by":"publisher","first-page":"71","DOI":"10.1016\/j.biosystemseng.2015.12.001","volume":"146","author":"R Barth","year":"2016","unstructured":"Barth R, Hemming J, van Henten EJ (2016) Design of an eye-in-hand sensing and servo control framework for harvesting robotics in dense vegetation. Biosys Eng 146:71\u201384","journal-title":"Biosys Eng"},{"key":"401_CR4","doi-asserted-by":"publisher","first-page":"105687","DOI":"10.1016\/j.compag.2020.105687","volume":"177","author":"L Fu","year":"2020","unstructured":"Fu L, Gao F, Wu J, Li R, Karkee M, Zhang Q (2020) Application of consumer rgb-d cameras for fruit detection and localization in field: a critical review. Comput Electron Agric 177:105687","journal-title":"Comput Electron Agric"},{"issue":"1","key":"401_CR5","doi-asserted-by":"publisher","first-page":"1","DOI":"10.1007\/s11119-019-09654-w","volume":"21","author":"G Lin","year":"2020","unstructured":"Lin G, Tang Y, Zou X, Xiong J, Fang Y (2020) Color-, depth-, and shape-based 3d fruit detection. Precision Agric 21(1):1\u201317","journal-title":"Precision Agric"},{"issue":"5","key":"401_CR6","doi-asserted-by":"publisher","first-page":"1072","DOI":"10.1007\/s11119-020-09709-3","volume":"21","author":"S Tu","year":"2020","unstructured":"Tu S, Pang J, Liu H, Zhuang N, Chen Y, Zheng C, Wan H, Xue Y (2020) Passion fruit detection and counting based on multiple scale faster r-cnn using rgb-d images. Precision Agric 21(5):1072\u20131091","journal-title":"Precision Agric"},{"issue":"12","key":"401_CR7","doi-asserted-by":"publisher","first-page":"2738","DOI":"10.3390\/s17122738","volume":"17","author":"Z Wang","year":"2017","unstructured":"Wang Z, Walsh KB, Verma B (2017) On-tree mango fruit size estimation using rgb-d images. Sensors 17(12):2738","journal-title":"Sensors"},{"key":"401_CR8","doi-asserted-by":"publisher","first-page":"105384","DOI":"10.1016\/j.compag.2020.105384","volume":"173","author":"J Zhang","year":"2020","unstructured":"Zhang J, Karkee M, Zhang Q, Zhang X, Yaqoob M, Fu L, Wang S (2020) Multi-class object detection using faster r-cnn and estimation of shaking locations for automated shake-and-catch apple harvesting. Comput Electron Agric 173:105384","journal-title":"Comput Electron Agric"},{"key":"401_CR9","doi-asserted-by":"publisher","first-page":"33","DOI":"10.1016\/j.biosystemseng.2016.01.007","volume":"146","author":"TT Nguyen","year":"2016","unstructured":"Nguyen TT, Vandevoorde K, Wouters N, Kayacan E, De Baerdemaeker JG, Saeys W (2016) Detection of red and bicoloured apples on tree with an rgb-d camera. Biosys Eng 146:33\u201344","journal-title":"Biosys Eng"},{"key":"401_CR10","doi-asserted-by":"publisher","first-page":"231","DOI":"10.1016\/j.compag.2017.05.014","volume":"139","author":"RM Perez","year":"2017","unstructured":"Perez RM, Cheein FA, Rosell-Polo JR (2017) Flexible system of multiple rgb-d sensors for measuring and classifying fruits in agri-food industry. Comput Electron Agric 139:231\u2013242","journal-title":"Comput Electron Agric"},{"key":"401_CR11","doi-asserted-by":"publisher","first-page":"67","DOI":"10.1016\/j.compag.2016.01.018","volume":"122","author":"D Andujar","year":"2016","unstructured":"Andujar D, Ribeiro A, Fern\u00e1ndez-Quintanilla C, Dorado J (2016) Using depth cameras to extract structural parameters to assess the growth state and yield of cauliflower crops. Comput Electron Agric 122:67\u201373","journal-title":"Comput Electron Agric"},{"key":"401_CR12","doi-asserted-by":"publisher","first-page":"293","DOI":"10.1016\/j.compag.2018.11.026","volume":"156","author":"A Milella","year":"2019","unstructured":"Milella A, Marani R, Petitti A, Reina G (2019) In-field high throughput grapevine phenotyping with a consumer-grade depth camera. Comput Electron Agric 156:293\u2013306","journal-title":"Comput Electron Agric"},{"issue":"2","key":"401_CR13","doi-asserted-by":"publisher","first-page":"872","DOI":"10.1109\/LRA.2017.2655622","volume":"2","author":"C Lehnert","year":"2017","unstructured":"Lehnert C, English A, McCool C, Tow AW, Perez T (2017) Autonomous sweet pepper harvesting for protected cropping systems. IEEE Robot Autom Lett 2(2):872\u2013879","journal-title":"IEEE Robot Autom Lett"},{"key":"401_CR14","doi-asserted-by":"publisher","first-page":"105302","DOI":"10.1016\/j.compag.2020.105302","volume":"171","author":"H Kang","year":"2020","unstructured":"Kang H, Chen C (2020) Fruit detection, segmentation and 3d visualisation of environments in apple orchards. Comput Electron Agric 171:105302","journal-title":"Comput Electron Agric"},{"key":"401_CR15","doi-asserted-by":"crossref","unstructured":"Arad B, Kurtser P, Barnea E, Harel B, Edan Y, Ben-Shahar O (2019) Controlled lighting and illumination-independent target detection for real-time cost-efficient applications. the case study of sweet pepper robotic harvesting. Sensors,19(6): 1390","DOI":"10.3390\/s19061390"},{"key":"401_CR16","doi-asserted-by":"crossref","unstructured":"Lehnert C, Sa I, McCool C, Upcroft B, Perez T (2016) Sweet pepper pose detection and grasping for automated crop harvesting. In: 2016 IEEE international conference on robotics and automation (ICRA), pp 2428\u20132434, IEEE","DOI":"10.1109\/ICRA.2016.7487394"},{"issue":"2","key":"401_CR17","doi-asserted-by":"publisher","first-page":"765","DOI":"10.1109\/LRA.2017.2651952","volume":"2","author":"I Sa","year":"2017","unstructured":"Sa I, Lehnert C, English A, McCool C, Dayoub F, Upcroft B, Perez T (2017) Peduncle detection of sweet pepper for autonomous crop harvesting-combined color and 3-d information. IEEE Robot Autom Lett 2(2):765\u2013772","journal-title":"IEEE Robot Autom Lett"},{"key":"401_CR18","doi-asserted-by":"crossref","unstructured":"Hinterstoisser S, Pauly O, Heibel H, Martina M, Bokeloh M (2019) An annotation saved is an annotation earned: using fully synthetic training for object detection. In: Proceedings of the IEEE\/CVF international conference on computer vision workshops","DOI":"10.1109\/ICCVW.2019.00340"},{"key":"401_CR19","unstructured":"Khan S, Phan B, Salay R, Czarnecki K (2019) Procsy: Procedural synthetic dataset generation towards influence factor studies of semantic segmentation networks. In: CVPR workshops, pp 88\u201396"},{"key":"401_CR20","doi-asserted-by":"crossref","unstructured":"Di Cicco M, Potena C, Grisetti G, Pretto A (2017) Automatic model based dataset generation for fast and accurate crop and weeds detection. In: 2017 IEEE\/RSJ international conference on intelligent robots and systems (IROS), pp 5188\u20135195, IEEE","DOI":"10.1109\/IROS.2017.8206408"},{"key":"401_CR21","doi-asserted-by":"publisher","first-page":"105699","DOI":"10.1016\/j.compag.2020.105699","volume":"177","author":"J Olatunji","year":"2020","unstructured":"Olatunji J, Redding G, Rowe C, East A (2020) Reconstruction of kiwifruit fruit geometry using a cgan trained on a synthetic dataset. Comput Electron Agric 177:105699","journal-title":"Comput Electron Agric"},{"key":"401_CR22","doi-asserted-by":"publisher","first-page":"106064","DOI":"10.1016\/j.compag.2021.106064","volume":"183","author":"K Zhang","year":"2021","unstructured":"Zhang K, Wu Q, Chen Y (2021) Detecting soybean leaf disease from synthetic image using multi-feature fusion faster r-cnn. Comput Electron Agric 183:106064","journal-title":"Comput Electron Agric"},{"key":"401_CR23","doi-asserted-by":"publisher","first-page":"284","DOI":"10.1016\/j.compag.2017.12.001","volume":"144","author":"R Barth","year":"2018","unstructured":"Barth R, Isselmuiden J, Hemming J, Van Henten EJ (2018) Data synthesis methods for semantic segmentation in agriculture: a capsicum annuum dataset. Comput Electron Agric 144:284\u2013296","journal-title":"Comput Electron Agric"},{"key":"401_CR24","doi-asserted-by":"crossref","unstructured":"Liu W, Anguelov D, Erhan D, Szegedy C, Reed S, Fu C-Y, Berg AC (2016) Ssd: Single shot multibox detector. In: European conference on computer vision, pp 21\u201337, Springer","DOI":"10.1007\/978-3-319-46448-0_2"},{"key":"401_CR25","doi-asserted-by":"crossref","unstructured":"Sandler M, Howard A, Zhu M, Zhmoginov A, Chen L-C (2018) Mobilenetv2: Inverted residuals and linear bottlenecks. In Proceedings of the IEEE conference on computer vision and pattern recognition, pp 4510\u20134520","DOI":"10.1109\/CVPR.2018.00474"},{"key":"401_CR26","doi-asserted-by":"crossref","unstructured":"Maric B, Polic M, Tabak T, Orsag M (2020) Unsupervised optimization approach to in situ calibration of collaborative human-robot interaction tools. In: 2020 IEEE international conference on multisensor fusion and integration for intelligent systems (MFI), pp 255\u2013262, IEEE","DOI":"10.1109\/MFI49285.2020.9235229"},{"key":"401_CR27","unstructured":"Hess R (2010) Blender foundations: the essential guide to learning blender 2.6. Focal Press"},{"key":"401_CR28","unstructured":"AliceVision, Meshroom: A 3D reconstruction software., 2018"}],"container-title":["Intelligent Service Robotics"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11370-021-00401-7.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/article\/10.1007\/s11370-021-00401-7\/fulltext.html","content-type":"text\/html","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/link.springer.com\/content\/pdf\/10.1007\/s11370-021-00401-7.pdf","content-type":"application\/pdf","content-version":"vor","intended-application":"similarity-checking"}],"deposited":{"date-parts":[[2022,5,25]],"date-time":"2022-05-25T16:21:50Z","timestamp":1653495710000},"score":1,"resource":{"primary":{"URL":"https:\/\/link.springer.com\/10.1007\/s11370-021-00401-7"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2021,12,1]]},"references-count":28,"journal-issue":{"issue":"2","published-print":{"date-parts":[[2022,4]]}},"alternative-id":["401"],"URL":"https:\/\/doi.org\/10.1007\/s11370-021-00401-7","relation":{},"ISSN":["1861-2776","1861-2784"],"issn-type":[{"value":"1861-2776","type":"print"},{"value":"1861-2784","type":"electronic"}],"subject":[],"published":{"date-parts":[[2021,12,1]]},"assertion":[{"value":"15 July 2021","order":1,"name":"received","label":"Received","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"6 November 2021","order":2,"name":"accepted","label":"Accepted","group":{"name":"ArticleHistory","label":"Article History"}},{"value":"1 December 2021","order":3,"name":"first_online","label":"First Online","group":{"name":"ArticleHistory","label":"Article History"}},{"order":1,"name":"Ethics","group":{"name":"EthicsHeading","label":"Declarations"}},{"value":"The authors have no relevant financial or non-financial interests to disclose.","order":2,"name":"Ethics","group":{"name":"EthicsHeading","label":"Conflicts of interests"}},{"value":"The code developed and used in this work is publically available within the Laboratory GitHub repository.","order":3,"name":"Ethics","group":{"name":"EthicsHeading","label":"Code availability"}},{"value":"Not applicable.","order":4,"name":"Ethics","group":{"name":"EthicsHeading","label":"Ethics approval"}},{"value":"Not applicable.","order":5,"name":"Ethics","group":{"name":"EthicsHeading","label":"Consent to participate"}},{"value":"Not applicable.","order":6,"name":"Ethics","group":{"name":"EthicsHeading","label":"Consent for publication"}}]}}