<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article article-type="review-article" dtd-version="2.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Manuf. Technol.</journal-id>
<journal-title>Frontiers in Manufacturing Technology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Manuf. Technol.</abbrev-journal-title>
<issn pub-type="epub">2813-0359</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1282843</article-id>
<article-id pub-id-type="doi">10.3389/fmtec.2023.1282843</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Manufacturing Technology</subject>
<subj-group>
<subject>Review</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Leveraging I4.0 smart methodologies for developing solutions for harvesting produce</article-title>
<alt-title alt-title-type="left-running-head">Recchia and Urbanic</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmtec.2023.1282843">10.3389/fmtec.2023.1282843</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name>
<surname>Recchia</surname>
<given-names>Ava</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2313724/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Urbanic</surname>
<given-names>Jill</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1498948/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>Industrial Engineering</institution>, <institution>University of Windsor</institution>, <addr-line>Windsor</addr-line>, <addr-line>ON</addr-line>, <country>Canada</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Mechanical, Automotive and Materials Engineering</institution>, <institution>University of Windsor</institution>, <addr-line>Windsor</addr-line>, <addr-line>ON</addr-line>, <country>Canada</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>
<bold>Edited by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/137477/overview">Weidong Li</ext-link>, Coventry University, United Kingdom</p>
</fn>
<fn fn-type="edited-by">
<p>
<bold>Reviewed by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2424110/overview">Roberto Simoni</ext-link>, Federal University of Santa Catarina, Brazil</p>
<p>
<ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2510796/overview">Antonio Carlos Valdiero</ext-link>, Federal University of Santa Catarina, Brazil</p>
</fn>
<corresp id="c001">&#x2a;Correspondence: Jill Urbanic, <email>jurbanic@uwindsor.ca</email>
</corresp>
</author-notes>
<pub-date pub-type="epub">
<day>15</day>
<month>12</month>
<year>2023</year>
</pub-date>
<pub-date pub-type="collection">
<year>2023</year>
</pub-date>
<volume>3</volume>
<elocation-id>1282843</elocation-id>
<history>
<date date-type="received">
<day>24</day>
<month>08</month>
<year>2023</year>
</date>
<date date-type="accepted">
<day>29</day>
<month>11</month>
<year>2023</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2023 Recchia and Urbanic.</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Recchia and Urbanic</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>Leveraging Computer-Aided Design (CAD) and Manufacturing (CAM) tools with advanced Industry 4.0 (I4.0) technologies presents numerous opportunities for industries to optimize processes, improve efficiency, and reduce costs. While certain sectors have achieved success in this effort, others, including agriculture, are still in the early stages of implementation. The focus of this research paper is to explore the potential of I4.0 technologies and CAD/CAM tools in the development of pick and place solutions for harvesting produce. Key technologies driving this include Internet of Things (IoT), machine learning (ML), deep learning (DL), robotics, additive manufacturing (AM), and simulation. Robots are often utilized as the main mechanism for harvesting operations. AM rapid prototyping strategies assist with designing specialty end-effectors and grippers. ML and DL algorithms allow for real-time object and obstacle detection. A comprehensive review of the literature is presented with a summary of the recent state-of-the-art I4.0 solutions in agricultural harvesting and current challenges/barriers to I4.0 adoption and integration with CAD/CAM tools and processes. A framework has also been developed to facilitate future CAD/CAM research and development for agricultural harvesting in the era of I4.0.</p>
</abstract>
<kwd-group>
<kwd>internet of things</kwd>
<kwd>industry 4.0</kwd>
<kwd>CAD</kwd>
<kwd>CAM</kwd>
<kwd>produce harvesting</kwd>
<kwd>automated harvesting</kwd>
</kwd-group>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Software Technologies</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1">
<title>1 Introduction</title>
<p>Industry 4.0 technologies are revolutionizing the agricultural sector into a new era of data-driven practices that are transforming the way crops are produced, harvested, and distributed. This transformation is paramount given that by 2050 the world&#x2019;s population is estimated to reach 10 billion, necessitating a 70% increase in food production (<xref ref-type="bibr" rid="B20">De Clercq et al., 2018</xref>). Internet of Things (IoT), artificial intelligence (AI), advanced robotics, simulation, machine learning (ML), deep learning (DL), and additive manufacturing (AM) are examples of the transformative technologies driving this shift. Their implementation across the sector&#x2019;s value chain has the potential to enhance competitiveness by increasing yields, improving input efficiency, and reducing financial and time expenses from human mistakes (<xref ref-type="bibr" rid="B33">Javaid et al., 2022</xref>).</p>
<p>A representative value chain for agriculture consisting of pre-production, production, processing, distribution, retail, and final consumption stages is shown in <xref ref-type="fig" rid="F1">Figure 1</xref>. The pre-production process involves suppliers of seeds, fertilizers, pesticides, and other inputs or services (<xref ref-type="bibr" rid="B61">Paunov and Satorra, 2019</xref>). Activities within the production process include sowing, irrigation, pest management, fertilization, and harvesting. Following this, the processing stage converts the crops into consumer-ready states by means of cleaning, sorting, grading, and/or slicing. The final product is then packaged or canned, stored, and distributed to retail stores, grocery markets, and supermarkets. Lastly, it is purchased by the consumer for end use. The primary subject of this review is the application of I4.0 technologies within the production stage, with a particular focus on harvesting.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption>
<p>Representative agriculture value chain.</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g001.tif"/>
</fig>
<p>Harvesting is a labour-intensive task, and the agricultural industry is currently experiencing a severe labour crisis. In a 2019 report from the Government of Canada, an estimated 13% of fruits and vegetables grown are either left unharvested or discarded post-harvest from being picked past their prime (<xref ref-type="bibr" rid="B11">Canada, 2019</xref>; <xref ref-type="bibr" rid="B86">VCMI, 2019</xref>). Other countries are facing similar labour struggles, such as Japan, New Zealand, the Netherlands, Ireland, Spain, and the United States (<xref ref-type="bibr" rid="B69">Ryan, 2023</xref>). Recent research efforts have aimed at addressing these issues by exploring automation strategies and new methods for smart farming using I4.0 technologies. The development of IoT-based monitoring systems, robotic harvesting solutions, and advanced object-recognition algorithms are promoting new ways to avoid produce loss, increase crop yields, and optimize resources (<xref ref-type="bibr" rid="B48">Liu et al., 2021</xref>).</p>
<p>Computer-aided design (CAD) and manufacturing (CAM) play a vital role in the success and optimization of this emerging agricultural era. The complexity of the harvesting environment poses unique challenges for automation which prevents the direct translation of solutions from other domains. This stems from the variability of the produce (size, shape, colour), crop objects (leaves, stems), and the crop environment (<xref ref-type="bibr" rid="B5">Bac et al., 2014</xref>). Leveraging advanced CAD and CAM tools presents opportunities to reverse engineer agricultural environments to effectively work with this variability. For instance, CAD tools support the design of specialty compliant end-of-arm tooling (EOAT) to grasp and manipulate different crop types. Additive manufacturing (AM) enables rapid prototyping for testing and validation of EOAT and promotes complexity within their designs. Compliant mechanisms mitigate uncertainty by conforming to objects of various geometries via compliant materials and structures (<xref ref-type="bibr" rid="B68">Rus and Tolley, 2015</xref>). Elastomers are an extensively used compliant material due to their ability to sustain large strains without permanent deformation (<xref ref-type="bibr" rid="B75">Shintake et al., 2018</xref>). Common actuation methods for compliant mechanisms (<xref ref-type="fig" rid="F2">Figure 2</xref>) include contact-driven, cable-driven, and fluid-driven. These solutions can be readily realized using AM processes to build the EOAT directly, or by exploiting rapid tooling strategies to fabricate molds, and using over-molding strategies to embed elements (<xref ref-type="bibr" rid="B57">Odhner et al., 2014</xref>).</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption>
<p>Actuation methods of compliant mechanisms <bold>(A)</bold> contact-driven from &#x00A9; 2018 <xref ref-type="bibr" rid="B75">Shintake et al. (2018)</xref>. Published by WILEY-VCH Verlag GmbH &#x0026; Co. KGaA, Weinheim. Licensed under CC BY-NC-ND 4.0. doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1002/adma.201707035">10.1002/adma.201707035</ext-link> <bold>(B)</bold> cable-driven from &#x00A9; 2018 <xref ref-type="bibr" rid="B75">Shintake et al. (2018)</xref>. Published by WILEY-VCH Verlag GmbH &#x0026; Co. KGaA, Weinheim. Licensed under CC BY-NC-ND 4.0. doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1002/adma.201707035">10.1002/adma.201707035</ext-link> <bold>(C)</bold> fluid-driven from &#x00A9; 2018 by <xref ref-type="bibr" rid="B100">Hu et al. (2018)</xref>. Licensee MDPI, Basel, Switzerland. Licensed under CC BY. doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3390/robotics7020024">10.3390/robotics7020024</ext-link>.</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g002.tif"/>
</fig>
<p>Further, CAD facilitates the design of robotic rigid-body structures and other auxiliary components to ensure safe navigation in the specified crop environment. Robust simulation models capable of capturing the environmental variability can assist with testing, validation, and optimization of solutions prior to commercialization. Moreover, integrating IoT networks and robotic harvesters presents opportunities for improved harvest decisions, trajectory planning, and ideal time to harvest, all of which represent methods leveraged by CAM systems.</p>
<p>There are several CAD modeling approaches that can be explored to effectively represent the objects within a complex crop environment. The classic contemporary approach is to use boundary rep (B-rep) models (<xref ref-type="fig" rid="F3">Figure 3A</xref>) with a constructive solid geometry history tree. Euler&#x2019;s operators, rules for identifying loops, and parametric curves and surfaces (spline, B&#xe9;zier, nonuniform rational B-splines (NURBS)) are standard to provide sophisticated design solutions (<xref ref-type="fig" rid="F3">Figure 3B</xref>).</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption>
<p>
<bold>(A)</bold> B-rep model which is constructed from vertices, edges, and faces, and <bold>(B)</bold> NURBS surface with UV flow lines and selected control points.</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g003.tif"/>
</fig>
<p>The present tools allow for effective design of mechanical components and complex surfaces or solid models but for complex &#x2018;random&#x2019; shapes, decomposition models are applied. Volumetric representation techniques offer realistic 3D depictions of objects, incorporating depth and other geometric features that are difficult to capture in 2D-based representations. Common approaches include point clouds (<xref ref-type="fig" rid="F4">Figure 4</xref>), voxel grids, octrees, and signed distance fields (SDFs). 3D point clouds, when sampled uniformly, offer the ability to preserve original geometric information without any discretization (<xref ref-type="bibr" rid="B26">Guo et al., 2021</xref>).</p>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption>
<p>
<bold>(A)</bold> Point cloud data for a lung, <bold>(B)</bold> slicing interval used to select points to extract a spline curve, and <bold>(C)</bold> an editable CAD model. (<xref ref-type="bibr" rid="B44">Kokab and Urbanic, 2019</xref>).</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g004.tif"/>
</fig>
<p>3D point cloud crop models have been generated directly from Light Detection and Ranging (LiDAR) systems or obtained from Unmanned Aerial Vehicles (UAVs), multispectral, and thermal imagery using photogrammetry software or other computer vision algorithms (<xref ref-type="bibr" rid="B40">Khanna et al., 2015</xref>; <xref ref-type="bibr" rid="B16">Comba et al., 2019</xref>). Point cloud to mesh to surface modeling is a standard approach for reverse engineering (<xref ref-type="bibr" rid="B85">V&#xe1;rady et al., 1997</xref>). Point cloud to solid model activities have occurred for reverse engineering (<xref ref-type="bibr" rid="B84">Urbanic and Elmaraghy, 2008</xref>) and developing bio-medical models (<xref ref-type="bibr" rid="B44">Kokab and Urbanic, 2019</xref>).</p>
<p>Irregular 3D point cloud data is often transformed into regular data formats, such as 3D voxel grids, for downstream analysis in deep learning architectures (<xref ref-type="bibr" rid="B13">Charles et al., 2017</xref>) and other analytical techniques. Voxelization of point clouds discretizes the data, forming grids in 3D space where each voxel defines individual values. <xref ref-type="bibr" rid="B15">Christiansen et al. (2017)</xref> developed a model of this type for a winter wheat field to assist with crop height estimations (<xref ref-type="fig" rid="F5">Figure 5</xref>). However, large voxel grids can require significant computational and memory resources. Octrees offer improved memory efficiency with their hierarchical structure. With this method, each voxel can be divided up to eight times and only voxels that are occupied are initialized whereas uninitialized voxels may represent an empty or unknown space (<xref ref-type="bibr" rid="B30">Hornung et al., 2013</xref>).</p>
<fig id="F5" position="float">
<label>FIGURE 5</label>
<caption>
<p>Winter wheat field <bold>(A)</bold> experimental representation and <bold>(B)</bold> voxel-grid representation. From &#x00A9; 2017 <xref ref-type="bibr" rid="B15">Christiansen et al. (2017)</xref>. Licensee MDPI, Basel, Switzerland. Licensed under CC BY. doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3390/s17122703">10.3390/s17122703</ext-link>.</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g005.tif"/>
</fig>
<p>SDFs are also a more efficient form of voxel-based representation that specifies the distance from any point in 3D space to the boundary of an object (<xref ref-type="bibr" rid="B24">Frisken and Perry, 2006</xref>). A sign is designated to each point relative to the boundary, where a negative sign is attributed to points within the boundary and a positive sign to points outside of the boundary (<xref ref-type="bibr" rid="B35">Jones et al., 2006</xref>). Two common specialized forms include Euclidean Signed Distance Fields (ESDFs) and Truncated Signed Distance Fields (TSDFs). ESDFs (<xref ref-type="fig" rid="F6">Figure 6</xref>) contain the Euclidean distance to the nearest occupied voxel for every voxel, whereas TSDFs incorporate a short truncation radius surrounding the boundary, allowing for more efficient construction and noise filtering (<xref ref-type="bibr" rid="B59">Oleynikova et al., 2017</xref>). These model types have been applied in agriculture. For example, an octree-based map in the form of TSDF for a sweet pepper environment was developed by <xref ref-type="bibr" rid="B50">Marangoz et al. (2022)</xref> to estimate produce shapes.</p>
<fig id="F6" position="float">
<label>FIGURE 6</label>
<caption>
<p>Explicit boundary representation (left) and implicit boundary representation via Euclidean SDF (right). Reproduced with permission. <xref ref-type="bibr" rid="B62">Peelar et al. (2019)</xref>. Copyright 2019, CAD Solutions.</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g006.tif"/>
</fig>
<p>Volumetric representations of crops and their environment can become large and complex. Simplified representations can be achieved by skeletonizing volumetric data. Voronoi skeleton models (VSKs) based on the Voronoi diagrams (VDs) from boundary line segments of a shape preserve both its geometrical and topological information (<xref ref-type="bibr" rid="B45">Langer et al., 2019</xref>). <xref ref-type="bibr" rid="B58">Ogniewicz and Ilg (1992)</xref> applied this approach with thresholding to assign residual values to each VD boundary that indicated its importance to the overall skeleton of 2D shapes. This is useful for complex objects that exhibit a large number of skeleton branches. The concept of VSKs has also been exercised for 3D shapes (<xref ref-type="bibr" rid="B56">N&#xe4;f et al., 1997</xref>; <xref ref-type="bibr" rid="B29">Hisada et al., 2001</xref>). Modelling agricultural environments and crop objects using VSKs has not been extensively researched. Considering clustered crops with relatively simple geometries, such as mushrooms, blueberries, or grapes, VSK representation models have potential to infer boundaries between the individual clustered objects. Solutions that incorporate this representation type should be developed in the agricultural domain to validate its applicability.</p>
<p>This review presents the current state of the art for the major I4.0 technologies employed in agricultural harvesting applications from studies published between 2019 to the present day. Technologies include Internet of Things, artificial intelligence, machine learning, deep learning, and advanced robotics. Applications of CAD/CAM tools and their integration with I4.0-based solutions will also be discussed.</p>
</sec>
<sec id="s2">
<title>2 Internet of Things</title>
<p>Internet of Things (IoT) is a network of physical devices and technologies that facilitate data collection, communication, and remote sensing and control (<xref ref-type="bibr" rid="B51">McKinsey and Company, 2022</xref>). As described by <xref ref-type="bibr" rid="B21">Elijah et al. (2018)</xref>, its architecture can be classified into four general components: (1) IoT devices; (2) communication technology; (3) Internet; and (4) data storage and processing. The IoT devices are responsible for collecting data in real-time and include sensors, unmanned aerial vehicles (UAVs), ground robotics, and other appliances. Communication technologies enable data exchange between the IoT devices and the data storage and analytics. These are either wired or wireless mediums. In agriculture, the most commonly used wireless communication devices include Cellular, 6LoWPAN (IPv6 over Low power Wireless Personal Area Network), ZigBee, Bluetooth, RFID (Radio Frequency Identification), Wi-Fi and LoRaWAN (<xref ref-type="bibr" rid="B73">Shafi et al., 2019</xref>). An experimental comparison study demonstrated that LoRaWAN systems would be most ideal for agricultural applications since it demonstrated greater longevity compared to Zigbee and Wi-Fi systems (<xref ref-type="bibr" rid="B70">Sadowski and Spachos, 2020</xref>). The internet serves as the connective foundation of IoT systems and facilitates remote access to the data. Extensive data collection requires the use of storage methodologies, such as clouds, and advanced algorithms for processing (<xref ref-type="bibr" rid="B21">Elijah et al., 2018</xref>). Forms of big data analytics are often employed.</p>
<p>The four components can be further broken down into layers to promote communication, management, and analytical capabilities. For example, <xref ref-type="bibr" rid="B74">Shi et al. (2019)</xref> presented a 5-layer IoT structure in agriculture containing a perception (physical devices), network (wired or wireless communication mediums), middleware (data aggregation), common platform (data storage and analytics), and application layer (management platforms and systems).</p>
<p>Agricultural crop growth and ideal harvest times are significantly influenced by environmental parameters, such as temperature, humidity, CO<sub>2</sub> levels, sunlight, and water levels (<xref ref-type="bibr" rid="B101">Sishodia et al., 2020</xref>; <xref ref-type="bibr" rid="B66">Rodr&#xed;guez et al., 2021</xref>). IoT-based systems enable real-time monitoring of these parameters to assist farm management and operation. Several studies have focused on the development of complete IoT-based systems for protected and open-field crop environments. Many indicated more efficient use of inputs (<xref ref-type="bibr" rid="B98">Zamora-Izquierdo et al., 2019</xref>; <xref ref-type="bibr" rid="B67">Rodr&#xed;guez-Robles et al., 2020</xref>), the potential to remotely control environmental conditions (<xref ref-type="bibr" rid="B82">Thilakarathne et al., 2023</xref>) and the ability to enhance farm management practices (<xref ref-type="bibr" rid="B52">Mekala and Viswanathan, 2019</xref>; <xref ref-type="bibr" rid="B66">Rodr&#xed;guez et al., 2021</xref>) based on the system&#x2019;s data-driven reports. Limitations concerning power supply, stable connections, and extensibility were highlighted by <xref ref-type="bibr" rid="B82">Thilakarathne et al. (2023)</xref>. Currently, all these systems are in a prototype phase. However, other sources have presented IoT-based systems that have achieved a commercial product stage. For instance, Croptracker (<xref ref-type="bibr" rid="B18">Croptracker, 2023</xref>), CropX (<xref ref-type="bibr" rid="B19">CropX inc, 2022</xref>) and Semios (<xref ref-type="bibr" rid="B71">Semios, 2023</xref>), are commercially available solutions.</p>
<p>Monitoring individual crop characteristics throughout growth can support the subsequent planning of actions performed by autonomous robotic solutions (<xref ref-type="bibr" rid="B41">Kierdorf et al., 2023</xref>), including establishing harvest priorities. Real-time machine learning classification and growth prediction models are methods that provide robotic solutions with knowledge regarding the current and future state of a crop. Several studies have utilized IoT devices to create time-series datasets that can be used as input to growth prediction models. For example, <xref ref-type="bibr" rid="B41">Kierdorf et al. (2023)</xref> created a time series UAV-based image dataset of cauliflower growth characteristics including developmental stage and size. <xref ref-type="bibr" rid="B90">Weyler et al. (2021)</xref> collected images of beet plants throughout a cultivation period via ground robot and monitored phenotypic traits for growth stage classification.</p>
<p>Both time-series datasets were recorded in open-field environments for crops where a top-down view is sufficient for capturing the necessary phenotypic data. However, this collection method may be impractical for crops grown in protected environments given the space constraints and direction of growth. Thus, different strategies will need to be explored to ensure valid data is collected in these situations. CAD/CAM tools have the potential to assist with exploring new strategies for many harvesting applications. Modelling IoT devices and the crop structure in CAD will allow for testing various collection strategies within a simulated environment prior to fabrication. A study by <xref ref-type="bibr" rid="B32">Iqbal et al. (2020)</xref> demonstrated this approach by creating 3D CAD models of cotton crops and their LiDAR based robot designed to collect phenotypic data. They created a Gazebo simulation environment of the cotton field to test the identification capabilities of multiple LiDAR configurations from 3D point cloud data, which was converted into voxel grids, for navigation in the crop rows. Another study used Gazebo to model a sweet pepper environment with a UR5e robot arm to evaluate the accuracy of their fruit shape estimation approach based on super ellipsoids (<xref ref-type="bibr" rid="B50">Marangoz et al., 2022</xref>). Their simulation was integrated with an octree-based truncated signed distance field to map the images collected by the robot&#x2019;s RGB camera.</p>
</sec>
<sec id="s3">
<title>3 Artificial intelligence</title>
<p>IoT-based systems involve data collection at high velocity, volume, value, variety, and veracity, which are the 5&#xa0;V&#x2019;s that define Big Data. To effectively process and analyze this data in agriculture, a variety of tools and techniques have been explored. Artificial intelligence-based tools, such as Machine Learning (ML) and Deep Learning (DL) are among the most used. Other methods include cloud computing and edge computing. ML and DL play a vital role in object detection and localization and crop yield mapping. Integrating IoT networks with these algorithms allows for data-driven performance and decision-making.</p>
<sec id="s3-1">
<title>3.1 Machine learning</title>
<p>Machine learning techniques are categorized into three core learning methods: supervised learning, unsupervised learning, and reinforcement learning (<xref ref-type="bibr" rid="B36">Jordan and Mitchell, 2015</xref>). In supervised learning systems, a model is trained using a labelled dataset and forms predictions from learned mapping. Forms of this approach include decision trees (DT), random forest (RF), support vector machines (SVM), artificial neural networks (ANN), and Bayesian classifiers (<xref ref-type="bibr" rid="B63">Praveen Kumar et al., 2019</xref>). DTs are very sensitive to changes in input data and are prone to overfitting. RF models (<xref ref-type="fig" rid="F7">Figure 7A</xref>), offer more robust solutions in comparison. Random subsets of input data are used to form multiple DTs where the combined final prediction follows a majority rule or takes an average of the individual predictions. A representative feed-forward back propagation ANN structure is presented in <xref ref-type="fig" rid="F7">Figure 7B</xref>, consisting of three hidden layers between the input and output layers. The back propagation aspect of ANNs promotes feedback learning to improve predictive performance of the model. This adaptive nature makes them more suitable for use in agricultural environments since recent inputs can enhance model structure and performance.</p>
<fig id="F7" position="float">
<label>FIGURE 7</label>
<caption>
<p>
<bold>(A)</bold> Random forest model structure and <bold>(B)</bold> feed-forward back propagation ANN structure.</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g007.tif"/>
</fig>
<p>Unsupervised learning models are trained using unlabeled data and employ algorithms such as k-means clustering, hierarchical clustering and fuzzy clustering. Clustering techniques are highly sensitive to outliers in the input data. In comparison to supervised learning approaches, these techniques are not as suitable for agricultural environments since their inherent variability presents a higher potential for outliers in the data. Reinforcement learning algorithms, such as Q-learning and Markov decision processes, take actions and learn from trial and error via training datasets composed of supervised and unsupervised learning (<xref ref-type="bibr" rid="B36">Jordan and Mitchell, 2015</xref>).</p>
<p>In agricultural harvesting applications, studies have utilized ML algorithms for crop yield predictions, crop growth stage classification and monitoring, optimal duration until harvest predictions and weather prediction/forecasting as shown in <xref ref-type="table" rid="T1">Table 1</xref>. However, this is not a mature area of research.</p>
<table-wrap id="T1" position="float">
<label>TABLE 1</label>
<caption>
<p>Summary of machine learning solutions for agricultural harvesting applications.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Source</th>
<th align="center">Crop</th>
<th align="center">Application</th>
<th align="center">Level of granularity</th>
<th align="center">Data source</th>
<th align="center">ML Method(s)</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="left">
<xref ref-type="bibr" rid="B92">Wise et al. (2022)</xref>
</td>
<td align="left">Strawberry</td>
<td align="left">Early prediction of optimal duration until harvest, crop parameter prediction (current-state and harvest-state), and growth stage classification</td>
<td align="left">Per-fruit</td>
<td align="left">RGB camera</td>
<td align="left">Linear regression (LR)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B87">Vijayakumar et al. (2021)</xref>
</td>
<td align="left">Citrus</td>
<td align="left">Yield prediction</td>
<td align="left">Per-tree and per-block (100&#x2b; trees)</td>
<td align="left">UAV multispectral imaging, DL-based ground image fruit count</td>
<td align="left">LR, gradient boost regression (GBR), random forest regression (RFR), partial least squares regression (PLSR)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B83">Torgbor et al. (2023)</xref>
</td>
<td align="left">Mango</td>
<td align="left">Yield prediction (time-series)</td>
<td align="left">Per-block and per-farm</td>
<td align="left">Satellite imagery (Landsat archive) and government weather data (SILO)</td>
<td align="left">random forest (RF), support vector regression (SVR), eXtreme gradient boosting (XGBOOST), PLSR, ridge regression (RIDGE), least absolute shrinkage and selection operator regression (LASSO)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B80">Tesfaye et al. (2021)</xref>
</td>
<td align="left">Wheat</td>
<td align="left">Yield prediction</td>
<td align="left">Per-field</td>
<td align="left">Satellite imagery (Sentinel-2)</td>
<td align="left">LR, regularized regression (GLMNET), generalized linear regression (GLM), neural network (NNET), k-nearest neighbours (kNN), recursive partitioning and regression trees (RPART), SVM, RF</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B79">Tatsumi et al. (2021)</xref>
</td>
<td align="left">Tomato</td>
<td align="left">Yield prediction and crop parameter prediction</td>
<td align="left">Per-plant</td>
<td align="left">UAV multispectral imaging</td>
<td align="left">RF, ridge regression (RI), SVM</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B78">Ta&#x15f;an et al. (2022)</xref>
</td>
<td align="left">Aubergine</td>
<td align="left">Yield prediction</td>
<td align="left">-</td>
<td align="left">Handheld spectroradiometer</td>
<td align="left">ANN, SVR, kNN, RF, adaptive boosting (AB)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B49">Maponya et al. (2020)</xref>
</td>
<td align="left">Grain</td>
<td align="left">Crop classification</td>
<td align="left">Per-region</td>
<td align="left">Satellite imagery (Sentinel-2)</td>
<td align="left">SVM, DT, kNN, RF, maximum likelihood (ML)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B43">Kocian et al. (2020)</xref>
</td>
<td align="left">Lettuce (greenhouse)</td>
<td align="left">Growth prediction</td>
<td align="left">Per-plant</td>
<td align="left">Sensors</td>
<td align="left">Dynamic Bayesian Network (DBN)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B12">Chang et al. (2021)</xref>
</td>
<td align="left">Lettuce (greenhouse)</td>
<td align="left">Early prediction of optimal harvest day and growth prediction</td>
<td align="left">Per-plant</td>
<td align="left">Sensors, camera</td>
<td align="left">NN, fuzzy logic (FL)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B39">Khalifeh et al. (2022)</xref>
</td>
<td align="left">-</td>
<td align="left">Weather prediction</td>
<td align="left">-</td>
<td align="left">Sensors</td>
<td align="left">Social spider algorithm-least square-support vector machine (SSA-LS-SVM)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B25">Goh et al. (2022)</xref>
</td>
<td align="left">Wheat</td>
<td align="left">Growth monitoring</td>
<td align="left">Per-field</td>
<td align="left">Satellite imagery (Sentinel-2)</td>
<td align="left">PLSR</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B27">Hassanzadeh et al. (2020)</xref>
</td>
<td align="left">Snap bean</td>
<td align="left">Crop classification (growth stage and maturity)</td>
<td align="left">Per-plant</td>
<td align="left">Spectroradiometer</td>
<td align="left">Perceptron (Perc), linear regression (LR), SVM, linear support vector classifier (LSVM), kNN, na&#xef;ve Bayes (NB), stochastic gradient descent (SGD), DT, RF</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Supervised learning approaches are most employed and yield prediction models are the most common use case. Estimating yield prior to harvest allows for accurate and timely planning (<xref ref-type="bibr" rid="B78">Ta&#x15f;an et al., 2022</xref>), which promotes efficient use of harvesting robots. Satellite and UAV imagery combined with ML methods have created accurate and detailed yield maps for crops grown in open fields (<xref ref-type="bibr" rid="B49">Maponya et al., 2020</xref>). UAV imagery (<xref ref-type="fig" rid="F8">Figure 8A</xref>) offers superior granularity when compared to satellite imagery (<xref ref-type="fig" rid="F8">Figure 8B</xref>), making it possible to estimate yields at the per-plant level as demonstrated in the work by <xref ref-type="bibr" rid="B79">Tatsumi et al. (2021)</xref>. The majority of these models derived vegetation indices (VIs), such as leaf area index (LAI), biomass, and evapo-transpiration (ET) from the images since these are indicative parameters of crop development (<xref ref-type="bibr" rid="B43">Kocian et al., 2020</xref>) that assist with determining crop yields (<xref ref-type="bibr" rid="B80">Tesfaye et al., 2021</xref>). However, this presents significant challenges for crops grown in open-field environments since poor lighting conditions from cloud coverage and shadows decrease image resolutions which can lead to critical growth stage gaps in the datasets (<xref ref-type="bibr" rid="B80">Tesfaye et al., 2021</xref>). This is also a challenge for open-field crop models that classify growth states and predict maturity levels or optimal durations until harvest. Implementing cloud restoration algorithms to restore data gaps was examined by <xref ref-type="bibr" rid="B80">Tesfaye et al. (2021)</xref>, which demonstrated an increased yield prediction accuracy.</p>
<fig id="F8" position="float">
<label>FIGURE 8</label>
<caption>
<p>Maps for yield estimation <bold>(A)</bold> Tomato green normalized difference vegetation index (GNDVI) from UAV multispectral imagery from <xref ref-type="bibr" rid="B79">Tatsumi et al. (2021)</xref>. Licensed under CC BY 4.0. doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1186/s13007-021-00761-2">10.1186/s13007-021-00761-2</ext-link> and <bold>(B)</bold> winter wheat field green area indices (GAI) from Sentinel-2 satellite imagery from <xref ref-type="bibr" rid="B25">Goh et al. (2022)</xref>. Licensed under CC BY-NC-ND 4.0. doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1016/j.jag.2022.103124">10.1016/j.jag.2022.103124</ext-link>.</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g008.tif"/>
</fig>
<p>Predicting growth stages, time of maturity and associated time of harvest for a given crop enables efficient scheduling of automated harvesters (<xref ref-type="bibr" rid="B92">Wise et al., 2022</xref>). As a result of the limitations for image quality in open-field environments, these model types have been developed for crops grown in protected environments, including strawberries (<xref ref-type="bibr" rid="B92">Wise et al., 2022</xref>) and lettuce (<xref ref-type="bibr" rid="B43">Kocian et al., 2020</xref>; <xref ref-type="bibr" rid="B12">Chang et al., 2021</xref>). Currently, all solutions in <xref ref-type="table" rid="T1">Table 1</xref> are in a prototype or conceptual phase.</p>
<p>There are no works that have combined the mapping feature of yield models with the crop growth classification and maturity prediction models. Future research should address this gap to represent these two attributes in detailed CAD models. The framework in <xref ref-type="sec" rid="s7">Section 7</xref> is designed to facilitate the development of such models using CAD/CAM tools. It is also important to consider the scale of the model and the corresponding computational and memory resources required. With a model of this nature, geometric traits of crops can be predicted and verified throughout the growth cycle. Further, ideal harvest times can be estimated, which will assist with scheduling robotic harvesters.</p>
</sec>
<sec id="s3-2">
<title>3.2 Deep learning</title>
<p>Deep learning computer vision algorithms are being used to detect, localize, and classify crops in real time for robotic harvesting applications. Crop environments present complex conditions for these tasks, which stem from various crop geometries, orientations, maturity levels, illumination, and occlusions. DL-based techniques have demonstrated higher-level feature learning and detection accuracies in comparison to traditional ML-based techniques, which makes them more applicable in complex environments (<xref ref-type="bibr" rid="B6">Badeka et al., 2021</xref>; <xref ref-type="bibr" rid="B34">Jia et al., 2021</xref>).</p>
<p>Current DL-based vision solutions for robotic harvesting are summarized in <xref ref-type="table" rid="T2">Table 2</xref>. Versions of YOLO (You-Only-Look-Once) were most commonly utilized as they demonstrate high detection accuracy with quick processing times. However, these algorithms are most successful in relatively simple environments where the crop density is low, lighting is uniform, and there are few to no occlusions (<xref ref-type="bibr" rid="B6">Badeka et al., 2021</xref>; <xref ref-type="bibr" rid="B7">Bazame et al., 2021</xref>). Other solutions have developed unique algorithms to improve detection capabilities, such as Dasnet (<xref ref-type="bibr" rid="B37">Kang et al., 2020a</xref>) for apples, and FoveaMask (<xref ref-type="bibr" rid="B34">Jia et al., 2021</xref>) for green fruits.</p>
<table-wrap id="T2" position="float">
<label>TABLE 2</label>
<caption>
<p>Deep learning computer vision systems for agricultural harvesting operations.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Source</th>
<th align="center">Crop</th>
<th align="center">Application</th>
<th align="center">Method(s)</th>
<th align="center">Accuracy (method)</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="left">
<xref ref-type="bibr" rid="B22">Faisal et al. (2020)</xref>
</td>
<td align="left">Date Fruit</td>
<td align="left">Maturity level classification</td>
<td align="left">VGG-19, Inception-v3, NASNet</td>
<td align="left">99.4% (VGG-19)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B7">Bazame et al. (2021)</xref>
</td>
<td align="left">Coffee Fruits</td>
<td align="left">Object detection, maturity level classification and mapping</td>
<td align="left">YOLO-v3-tiny</td>
<td align="left">84%</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B99">Zhu et al. (2022)</xref>
</td>
<td align="left">Sugarcane</td>
<td align="left">Object detection and localization</td>
<td align="left">YOLO-v4</td>
<td align="left">94.40%</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B6">Badeka et al. (2021)</xref>
</td>
<td align="left">Grapes</td>
<td align="left">Object detection</td>
<td align="left">Faster R-CNN, YOLOv3, YOLOv5, EfficientDet-D0, RetinaNet, MobileNet</td>
<td align="left">77.9% (EfficientDet-D0) 77.2% (Faster R-CNN) 60.2% (YOLOv3) 73.2% (YOLOv5) 72.54% (RetinaNet) 71.79% (MobileNet)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B28">Hespeler et al. (2021)</xref>
</td>
<td align="left">Chili peppers</td>
<td align="left">Object detection</td>
<td align="left">YOLOv3 (RGB and thermal)</td>
<td align="left">100% (RGB) 97% (thermal)</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B42">Kim et al. (2023)</xref>
</td>
<td align="left">Cucumbers</td>
<td align="left">Object detection</td>
<td align="left">Amodal segmentation</td>
<td align="left">50.06%</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B54">Miragaia et al. (2021)</xref>
</td>
<td align="left">Plums</td>
<td align="left">Ripeness classification</td>
<td align="left">AlexNet</td>
<td align="left">95.50%</td>
</tr>
<tr>
<td rowspan="2" align="left">
<xref ref-type="bibr" rid="B8">Benavides et al. (2020)</xref>
</td>
<td rowspan="2" align="left">Tomatoes (beef and cluster varieties)</td>
<td rowspan="2" align="left">Object detection and localization (ripe tomatoes and their peduncles)</td>
<td rowspan="2" align="left">Sobel</td>
<td align="left">80.8% [beef]</td>
</tr>
<tr>
<td align="left">87.5% [cluster]</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B88">Wang et al. (2021)</xref>
</td>
<td align="left">Grapes</td>
<td align="left">Object detection</td>
<td align="left">SwinGD</td>
<td align="left">70%</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B97">Yu et al. (2019)</xref>
</td>
<td align="left">Strawberries</td>
<td align="left">Object detection and localization</td>
<td align="left">Mask-RCNN</td>
<td align="left">95.78%</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B47">Liu et al. (2020)</xref>
</td>
<td align="left">Tomatoes</td>
<td align="left">Detection</td>
<td align="left">YOLO-Tomato (YOLO-v3 &#x2b; NN)</td>
<td align="left">96.40%</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B37">Kang et al. (2020a)</xref>
</td>
<td align="left">Apples</td>
<td align="left">Object detection</td>
<td align="left">Dasnet</td>
<td align="left">87%</td>
</tr>
<tr>
<td align="left">
<xref ref-type="bibr" rid="B34">Jia et al. (2021)</xref>
</td>
<td align="left">Green Fruits</td>
<td align="left">Object detection and localization</td>
<td align="left">FoveaMask</td>
<td align="left">75%</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Several advanced DL-based techniques have been explored to limit the influence of occlusions from overlapping crops, leaves, stems, and other objects. It is important to address this issue since retaking images from various viewpoints will increase cycle times. General instance segmentation models, such as Mask-RCNN (Mask Region-Based Convolutional Neural Network) are common choices in agricultural computer vision systems for addressing occlusions. In <xref ref-type="bibr" rid="B97">Yu et al. (2019)</xref>, ripe strawberry fruits were successfully detected and picking points were localized with a 95.78% accuracy using Mask-RCNN. The solution indicated effective detection in situations with varying illumination, multi-fruit adhesion, overlapping fruits, and occlusions. However, the processing time was slow and large errors were observed in cases of unsymmetrical fruits. Amodal segmentation, a one-stage model, was used by <xref ref-type="bibr" rid="B42">Kim et al. (2023)</xref> to predict the blocked regions of cucumbers with an accuracy of 50.06%. This model was significantly slower than Mask-RCNN models and other general instance segmentation models. <xref ref-type="bibr" rid="B47">Liu et al. (2020)</xref> developed YOLO-Tomato, a YOLOv3 and circular bounding box-based model that is robust to illumination and occlusion conditions. Performance loss was indicated in severe occlusion cases, however, the authors stated that this is not a vital issue since harvesting robots operate alternately between detection and picking. Thus, hidden crops will appear after the front ones have been removed. However, this approach is not applicable in cases where ripe crops are hidden behind unripe crops since removal will not occur. <xref ref-type="bibr" rid="B28">Hespeler et al. (2021)</xref> utilized thermal imaging for the harvesting of chili peppers, which improved the detection accuracy in environments with variant lighting and heavy occlusion from leaves or overlapping peppers. This method outperformed both YOLO and Mask-RCNN algorithms with respect to detection accuracy.</p>
<p>It is important to note that all these computer vision solutions for agricultural harvesting are based on real-time performance. Their implementation in crop environments requires the ability to accurately detect, localize, and classify crops despite occlusions and varying illumination conditions. However, the solutions that incorporated these parameters all demonstrated slower processing times compared to traditional DL-methods. As a result of these limitations, all are currently in a prototype phase. Thus, it is essential to explore alternative approaches to improve robotic harvesting performance. Future research should focus on reverse engineering the environment and utilizing CAD and CAM tools to create 3D crop maps prior to harvest as this may offer better performance in situations with occlusions from crop overlapping, other objects in the environment, and with varying illumination conditions.</p>
<p>In the study by <xref ref-type="bibr" rid="B38">Kang et al. (2020b)</xref>, an octrees-based 3D representation of the crop environment for a robotic apple harvester was created, which included the locations and orientations of the fruit and obstacles (branches) in the scene. The positional information from the model was integrated with a central control algorithm to compute the proper grasping pose during picking and the trajectory path. This reverse engineered model operated in real-time and only incorporated objects within the working view of the camera. There is potential to expand this methodology to develop environmental models that incorporate predictions for maturity levels, physical growth characteristics, and other geometric properties for the entire working area. By leveraging forms of in-process techniques and a-priori knowledge, the influence from environmental complexities and dependency on real-time performance can be reduced. Furthermore, integrating DL-based 3D crop maps with ML growth prediction and maturity models can support downstream robotic design, end-effector design, trajectory planning, and optimal harvest time decisions.</p>
</sec>
</sec>
<sec id="s4">
<title>4 Advanced robotics</title>
<p>Automated robotic solutions are being developed to perform agricultural harvesting pick and place tasks (<xref ref-type="fig" rid="F9">Figure 9</xref>). Recent research efforts have focused on addressing particular technical elements, including computer vision systems for object detection and localization, motion and trajectory planning algorithms, and EOAT design. An overview of current solutions that incorporate all these aspects is shown in <xref ref-type="table" rid="T3">Table 3</xref>.</p>
<fig id="F9" position="float">
<label>FIGURE 9</label>
<caption>
<p>
<bold>(A)</bold> Automated robotic harvesting solution for strawberries from <xref ref-type="bibr" rid="B93">Xiong et al. (2019)</xref>. Licensed under CC BY-NC-ND 4.0. doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1002/rob.21889">10.1002/rob.21889</ext-link>. <bold>(B)</bold> schematic of automated harvesting solutions.</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g009.tif"/>
</fig>
<table-wrap id="T3" position="float">
<label>TABLE 3</label>
<caption>
<p>Summary of recent robotic crop harvesting solutions.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Source</th>
<th align="center">Crop</th>
<th align="center">Environment</th>
<th align="center">Robotic arm (commercial name)</th>
<th align="center">Mobile navigation</th>
<th align="center">Computer vision algorithm (camera)</th>
<th align="center">Harvest success rate (unmodified real environment scenario)</th>
<th align="center">Cycle time (s)</th>
<th align="center">Stage of development</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<xref ref-type="bibr" rid="B53">Miao et al. (2023)</xref>
</td>
<td align="center">Tomato</td>
<td align="center">Protected</td>
<td align="center">6 DOF</td>
<td align="center">SLAM</td>
<td align="center">YOLOv5 (Intel RealSense D435i)</td>
<td align="center">90%</td>
<td align="center">9</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B77">Stepanova et al. (2023)</xref>
</td>
<td align="center">Tomato</td>
<td align="center">Protected</td>
<td align="center">6 DOF (UR5)</td>
<td align="center">-</td>
<td align="center">YOLO (Intel RealSense D435i and Zivid Two Industrial 3D)</td>
<td align="center">-</td>
<td align="center">16</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B14">Chen et al. (2023)</xref>
</td>
<td align="center">Pitaya (Dragon fruit)</td>
<td align="center">Open-field</td>
<td align="center">3 DOF</td>
<td align="center">2D SLAM</td>
<td align="center">YOLOv3-tiny (1080p Webcam)</td>
<td align="center">97%</td>
<td align="center">&#x223c;104</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B95">Yin et al. (2023)</xref>
</td>
<td align="center">Citrus</td>
<td align="center">Orchard</td>
<td align="center">6 DOF</td>
<td align="center">SLAM</td>
<td align="center">YOLOv4 tiny (Intel RealSense D435i)</td>
<td align="center">(87.20%)</td>
<td align="center">10.9</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B10">Brown and Sukkarieh (2021)</xref>
</td>
<td align="center">Plum</td>
<td align="center">Orchard</td>
<td align="center">6 DOF (UR5)</td>
<td align="center">SLAM</td>
<td align="center">YOLOv3 and HSV (Intel RealSense D435i)</td>
<td align="center">(42%)</td>
<td align="center">12 [from previous lab testing]</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B46">Lehnert et al. (2020)</xref>
</td>
<td align="center">Sweet pepper</td>
<td align="center">Protected</td>
<td align="center">6 DOF (UR5)</td>
<td align="center">-</td>
<td align="center">HSV colour segmentation, PFG-SVM and MiniInception (RGB-D)</td>
<td align="center">76.5% (47%)</td>
<td align="center">36.9</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B4">Arad et al. (2020)</xref>
</td>
<td align="center">Sweet pepper</td>
<td align="center">Protected</td>
<td align="center">6 DOF (Fanuc LR Mate 200iD)</td>
<td align="center">Arduino-based PLC</td>
<td align="center">Shape and colour based detection (RGB-D)</td>
<td align="center">61% (29%)</td>
<td align="center">24</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B9">Birrell et al. (2020)</xref>
</td>
<td align="center">Lettuce</td>
<td align="center">Open-field</td>
<td align="center">6 DOF (UR10)</td>
<td align="center">-</td>
<td align="center">YOLOv3 and DOCN (USB webcam)</td>
<td align="center">(88.20%)</td>
<td align="center">31.7</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B72">Sep&#xfa;lveda et al. (2020)</xref>
</td>
<td align="center">Aubergine</td>
<td align="center">Open-field</td>
<td align="center">2 &#xd7; 6 DOF (Kinova MICO)</td>
<td align="center">-</td>
<td align="center">Image segmentation algorithm [SVM] (Prosilica GC2450C and Mesa SwissRanger SR4000)</td>
<td align="center">91.67%</td>
<td align="center">26.2</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B96">Yu et al. (2021)</xref>
</td>
<td align="center">Apple</td>
<td align="center">Orchard</td>
<td align="center">2 &#xd7; 6 DOF (custom)</td>
<td align="center">-</td>
<td align="center">SIFT (binocular)</td>
<td align="center">72%</td>
<td align="center">14.6</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B93">Xiong et al. (2019)</xref>
</td>
<td align="center">Strawberry</td>
<td align="center">Protected</td>
<td align="center">5 DOF</td>
<td align="center">Joystick</td>
<td align="center">Shape and colour based detection (RGB-D)</td>
<td align="center">(53.6%)</td>
<td align="center">10.6</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B91">Williams et al. (2019)</xref>
</td>
<td align="center">Kiwi</td>
<td align="center">Orchard</td>
<td align="center">4 &#xd7; 3 DOF (custom)</td>
<td align="center">-</td>
<td align="center">FCN-8s [adapted form of VGG-net16] (Basler acA1920-40uc USB 3.0)</td>
<td align="center">(51%)</td>
<td align="center">5.5</td>
<td align="center">Prototype</td>
</tr>
<tr>
<td align="center">
<xref ref-type="bibr" rid="B38">Kang et al. (2020b)</xref>
</td>
<td align="center">Apple</td>
<td align="center">Orchard</td>
<td align="center">6 DOF (UR5)</td>
<td align="center">-</td>
<td align="center">Mobile-DasNet (Intel RealSense D-435 RGB-D)</td>
<td align="center">80%</td>
<td align="center">6.5</td>
<td align="center">Prototype</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Notably, a considerable number of studies have concentrated on crops grown in protected environments (tomatoes, strawberries, sweet peppers). Protected environments offer diffused lighting, shelter from adverse weather, and level terrains, which are advantageous to automated harvesting. However, difficulties with real-time mobile navigation, and object detection, localization, and grasping remain a barrier to commercial adoption. To date, many of these solutions are in the prototype stage with very few stating near commercial readiness. Other companies have presented solutions that have achieved commercial product trials, including strawberry harvesting robots (<xref ref-type="bibr" rid="B2">Advanced Farm Technologies, 2023</xref>; <xref ref-type="bibr" rid="B3">Agrobot, n.d.</xref>), apple harvesting robots (<xref ref-type="bibr" rid="B1">Advanced Farm Technologies, 2023</xref>), and tomato harvesting robots (<xref ref-type="bibr" rid="B65">Ridder and MetoMotion, n.d.</xref>; <xref ref-type="bibr" rid="B31">Panasonic, 2018</xref>).</p>
<p>Few solutions have incorporated a form of autonomous navigation in the harvesting environment. Simultaneous Localization and Mapping (SLAM) is the most applied algorithm, allowing the harvesting units to map out their environment and localize in real time. SLAM maps are constructed using point cloud data (<xref ref-type="bibr" rid="B95">Yin et al., 2023</xref>). Solutions for crops grown in unprotected environments were more likely to incorporate this element than those in protected environments. For example, a robot designed for citrus fruits (<xref ref-type="bibr" rid="B95">Yin et al., 2023</xref>) was tested in a large field environment with a multisensory fusion SLAM algorithm and demonstrated a localization and sensing performance suitable for harvest. Similarly, a plum harvesting robot (<xref ref-type="bibr" rid="B10">Brown and Sukkarieh, 2021</xref>) used a SLAM camera for global robot tracking to assist with creating crop density maps.</p>
<p>Variations of the YOLO algorithm were most used for object detection and localization across robotic solutions. YOLO is a one-stage CNN (Convolutional Neural Network) that has fast detection and high accuracy performance in real-time, but struggles with detecting small objects (<xref ref-type="bibr" rid="B6">Badeka et al., 2021</xref>). As mentioned in the previous section, occlusions in the natural growing environment are one of the largest limitations to the accuracy of object detection and localization in computer vision algorithms. Occlusions not only lower detection accuracy but can also increase the harvesting cycle time from algorithm processing or the need for additional image viewpoints. Many solutions leveraged modified crop environments during testing, where obstacles causing occlusions, such as leaves, stems, or overlapping crops, were removed prior to harvest. This demonstrated much higher harvesting success rates for sweet peppers (<xref ref-type="bibr" rid="B4">Arad et al., 2020</xref>). As mentioned previously, efforts to create algorithms that predict object position and orientation, such as Mask-RCNN have been developed to address this issue. Furthermore, dual arm robotic solutions have also been explored to reduce the challenges that arise from occlusions. In the work by <xref ref-type="bibr" rid="B72">Sep&#xfa;lveda et al. (2020)</xref>, one robotic arm was dedicated to removing the objects causing occlusions while the other proceeded with the harvesting actions of aubergines. This method demonstrated longer cycle times but had higher productivity in terms of the number of fruits harvested over a given period.</p>
<p>A specialty end-effector was designed for each robotic solution since the crops varied with respect to their shape, size, and environment. When working with crops in dense environments or highly clustered crops, the design of the end-effector becomes more difficult due to the increased risk of damage to the crop itself or the surrounding objects during picking. Many of the end-effector designs demonstrated high success rates when the crops were accurately detected and localized. Most challenges for end-effector performance arose due to incorrect positioning from neighbouring crops or obstacles, inability to access the crop due to rigid and large designs, and slow cycle times from heavy weight. For example, in <xref ref-type="bibr" rid="B53">Miao et al. (2023)</xref>, the gripper design was too rigid and large, which made picking tomato stems challenging if they were either angled or short. A similar issue was noted by <xref ref-type="bibr" rid="B10">Brown and Sukkarieh (2021)</xref>, where the oversized soft gripper design caused issues with plum picking and concerns with longevity resulting from damage of the silicone material. Difficulties in positioning were observed by <xref ref-type="bibr" rid="B4">Arad et al. (2020)</xref> and <xref ref-type="bibr" rid="B95">Yin et al. (2023)</xref> when obstacles and neighbouring fruit blocked the end-effector from reaching its intended picking position. Weight is also a crucial factor as it can significantly impact the cycle time and risk damaging the crop during harvest, which was highlighted in a solution for lettuce (<xref ref-type="bibr" rid="B9">Birrell et al., 2020</xref>). The heavy weight of the end effector caused picking to be the rate limiting step and a high damage rate was also observed.</p>
<p>CAD/CAM tools play a critical role in the design and development of specialty end-effectors for robotic harvesting. Simulations allow for testing and validating grasping and picking strategies, material selection, and mechanical design. For example, <xref ref-type="bibr" rid="B23">Fan et al. (2021)</xref> investigated multiple apple grasping principles and picking patterns by developing 3D branch-stem-fruit models and conducting finite element simulations in ABAQUS to compute and compare separation forces. An underactuated broccoli gripper design was validated using ADAMS (Automatic Dynamic Analysis of Mechanical Systems) software that measured applied contact forces (<xref ref-type="bibr" rid="B94">Xu et al., 2023</xref>). ADAMS software was also used by <xref ref-type="bibr" rid="B55">Mu et al. (2020)</xref> to validate trajectory motions of the bionic fingers in a kiwifruit robotic end-effector design. Simulation is also being used to explore best practices for robotic harvesting solutions (Van De Walker et al., 2021) with software such as V-REP, Gazebo, ARGoS, and Webots (<xref ref-type="bibr" rid="B32">Iqbal et al., 2020</xref>).</p>
<p>Future works should focus on compliant mechanism design for robotic harvesting end-of-arm-tooling for crops that are harvested by gripping. Contact-driven, cable-driven, and fluid-driven actuation methods are suitable for use in agricultural environments. Silicone rubbers are common choices for gripper fabrication in this domain, but are limited by their susceptibility to damage from surrounding objects in crop environments. Leveraging AM to fabricate compliant structures allows for rapid prototyping and customization. For example, novel fluid-driven soft robotic fingers for apple harvesting were designed and realized via 3D printing (<xref ref-type="bibr" rid="B89">Wang et al., 2023</xref>). Coupling designs with sensor technologies can provide feedback necessary for subsequent optimization and should also be further explored.</p>
</sec>
<sec id="s5">
<title>5 Summary</title>
<p>I4.0 technologies and CAD/CAM tools have been shown to support automated data-driven solutions for harvesting agricultural crops, but this is not a mature field. IoT networks coupled with machine learning algorithms are indicating that improved yields, growth predictability, and weather forecasting can be implemented, providing new ways of seeing related to designing and managing the agricultural space. However, it is clear that most solutions are either in a prototype or conceptual phase as there are several challenges and barriers preventing successful operation in real crop environments (<xref ref-type="sec" rid="s6">Section 6</xref>).</p>
<p>CAD/CAM tools have been leveraged using the standard design packages available. Future research should be directed at implementing novel CAD/CAM design tools (such as voxelized representations and developing simulation solutions that include the mechanical and physical properties of plants, leaves, etc.) with existing I4.0-based solutions to form a completely integrated solution targeting the unique challenges in the agricultural domain. These can facilitate harvest decisions at the granular and systems level, as discussed in the presented framework in <xref ref-type="sec" rid="s7">Section 7</xref>.</p>
</sec>
<sec id="s6">
<title>6 Challenges and barriers to the adoption of I4.0 technologies and CAD/CAM tools</title>
<p>For widespread implementation of advanced I4.0 technologies and CAD/CAM tools across agricultural harvesting operations, there are several challenges and barriers that first need to be addressed. These originate from the several uncertainties within the dynamic crop environment, data sufficiency, and other technical aspects.</p>
<p>Throughout growth, crop parameters such as size, shape, colour, position, and orientation can change considerably. Additionally, these parameters are significantly different between crop types. This product variability creates difficulties for computer vision system operation, tool design and implementation, process standardization, and CAD modelling and simulation. For example, fruits and vegetables that are green in colour when mature often share a resemblance in hue to surrounding plant objects, such as vines, leaves, and stems. These scenarios hinder the ability for computer vision algorithms to detect and localize the product. Neighbouring objects and overlapping crops also influence vision system performance as well as end-effector design and implementation. These obstacles can cause damage to end-effectors and limit their ability to access the product. Non-homogenous properties of crops pose a challenge for capturing the true mechanical characteristics in 3D models and simulations. For instance, sections of a tomato crop vine may be more rigid than others, although current finite element analysis methods would represent a 3D model of this as a uniform rigid body.</p>
<p>It is also important to note the extent of data required in adopting these advanced technologies may be difficult to consolidate. As mentioned previously, crop environments are dynamic and variable. To develop representative and robust computerized models, the inherent unpredictability needs to be captured. This will require sufficient data for continuous model updating and processing. As a result, model history tree structures will become large and complicated and will likely require significant memory capacity and computational resources. Another challenge is ensuring the compatibility of the models with downstream analytical algorithms. Depending on the representation type, conversions may be necessary. For instance, 3D point clouds are not effective inputs to deep learning algorithms and must be translated into other volumetric forms, such as voxel grids. Methods to convert between data types for deep learning have been developed. One example is NVIDIA&#x2019;s Kaolin library which provides a PyTorch API to work with several forms of 3D representations (<xref ref-type="bibr" rid="B17">NVIDIA Corporation, 2020</xref>). This library includes an expanding collection of GPU-optimized operations for quick conversions between CAD representation types, data loading, volumetric acceleration data structures, and many other techniques.</p>
</sec>
<sec id="s7">
<title>7 Framework</title>
<p>A framework based on the IoT structure from <xref ref-type="bibr" rid="B74">Shi et al. (2019)</xref> has been developed to facilitate future CAD/CAM research and development for agricultural harvesting in the era of I4.0 (<xref ref-type="table" rid="T4">Table 4</xref>, <xref ref-type="table" rid="T5">5</xref>).</p>
<table-wrap id="T4" position="float">
<label>TABLE 4</label>
<caption>
<p>Framework for future CAD/CAM research and development in agricultural harvesting.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th rowspan="2" align="left"/>
<th rowspan="2" align="center">IoT layer (<xref ref-type="bibr" rid="B74">Shi et al., 2019</xref>)</th>
<th rowspan="2" align="center">Layer Description</th>
<th rowspan="2" align="center">CAD</th>
<th colspan="2" align="center">Application</th>
</tr>
<tr>
<th align="center">Open-field environment</th>
<th align="center">Protected environment</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td rowspan="2" align="center">
<bold>Data</bold>
</td>
<td align="left">Perception</td>
<td align="left">Physical devices</td>
<td align="left">Point clouds, images, videos, physical measurements</td>
<td align="left">Group processing (per-field, per-block)</td>
<td align="left">Individual processing (per-plant, per-produce)</td>
</tr>
<tr>
<td align="left">Network</td>
<td align="left">Wired or wireless communications</td>
<td align="left">N/A</td>
<td align="left">N/A</td>
<td align="left">N/A</td>
</tr>
<tr>
<td rowspan="2" align="center">
<bold>Knowledge</bold>
</td>
<td align="left">Middleware</td>
<td align="left">Data aggregation</td>
<td align="left">Standardized representation (B-rep, voxel-grid, octree, or VSK) and tools</td>
<td colspan="2" align="left">Establish common standard using produce classification scheme (see <xref ref-type="table" rid="T5">Table 5</xref>) and develop a materials properties knowledge base for produce/plants</td>
</tr>
<tr>
<td align="left">Common platform</td>
<td align="left">Data storage and analytics</td>
<td align="left">Generate in-process model instances</td>
<td colspan="2" align="left">Jack, process flows, yield maps, V-REP, Gazebo, etc.</td>
</tr>
<tr>
<td align="center">
<bold>Design space</bold>
</td>
<td align="left">Application</td>
<td align="left">Management platforms and systems</td>
<td align="left">Design, build, simulate, test, use</td>
<td colspan="2" align="left">End-of-arm-tooling, harvest planning, grasping and manipulation strategies, ergonomics, monitoring, etc.</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="T5" position="float">
<label>TABLE 5</label>
<caption>
<p>Classification scheme for produce.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Parameter</th>
<th align="center">Shape</th>
<th align="center">Growth location</th>
<th align="center">Growth pattern</th>
<th align="center">Harvest contact points</th>
<th align="center">Harvest method</th>
<th align="center">Stem Mechanics</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td rowspan="7" align="center">
<bold>Classification</bold>
</td>
<td align="left">Round <bold>(RO)</bold>
</td>
<td align="left">Ground <bold>(G)</bold>
</td>
<td align="left">Cluster <bold>(C)</bold>
</td>
<td align="left">Body <bold>(BD)</bold>
</td>
<td align="left">Hand <bold>(H)</bold>
</td>
<td align="left">Rigid <bold>(R)</bold>
</td>
</tr>
<tr>
<td align="left">Oblong <bold>(O)</bold>
</td>
<td align="left">Bush <bold>(B)</bold>
</td>
<td align="left">Individual <bold>(I)</bold>
</td>
<td align="left">Base <bold>(BA)</bold>
</td>
<td align="left">Shears <bold>(S)</bold>
</td>
<td align="left">Flexible <bold>(F)</bold>
</td>
</tr>
<tr>
<td align="left">Oblate <bold>(OBL)</bold>
</td>
<td align="left">Tree <bold>(TR)</bold>
</td>
<td align="left"/>
<td align="left">Stem <bold>(ST)</bold>
</td>
<td align="left"/>
<td align="left">Brittle <bold>(B)</bold>
</td>
</tr>
<tr>
<td align="left">Ovoid <bold>(OV)</bold>
</td>
<td align="left">Trellis <bold>(TRL)</bold>
</td>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
</tr>
<tr>
<td align="left">Elliptic <bold>(E)</bold>
</td>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
</tr>
<tr>
<td align="left">Conic <bold>(C)</bold>
</td>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
</tr>
<tr>
<td align="left">Lobe <bold>(L)</bold>
</td>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
<td align="left"/>
</tr>
</tbody>
</table>
</table-wrap>
<p>At the perception layer, data collected from physical IoT devices exhibit various forms of CAD representations. In agricultural systems, 3D point clouds from LiDAR systems, images from UAVs, or physical measurements from sensor networks are all examples of data that can be collected in real-time. The granularity of this data is dependent on the environment type. Obtaining data per-produce will be difficult in open-field environments since the operational scale is significantly larger in comparison to protected environments. It is recommended that a group-processing methodology (per-field or per-block) is applied to collect data in these environments; this can be achieved using satellite imagery or UAVs.</p>
<p>To develop effective models for agricultural harvesting activities, it is important to aggregate this data into a standardized CAD representation. This is similar to the function of the middleware layer in an IoT system. By classifying produce according to key parameters (<xref ref-type="table" rid="T5">Table 5</xref>), appropriate representations can be established. For example, produce that are round, have a cluster growth pattern, and are harvested by contacting the body, such as mushrooms and blueberries, may be best represented using VSKs or SDFs to conserve the individual object boundaries in the cases of overlap. Whereas produce that grow individually and are harvested by cutting a rigid stem, such as sweet peppers or citrus fruits, may be best represented using 3D voxel-grids or octrees since their respective object boundaries are more easily identifiable. The representation type will also depend on the growth location of the produce and the associated objects that will be incorporated in the model, such as branches, leaves, and vines.</p>
<p>The aggregated data from IoT systems are stored and analyzed in a common platform. For CAD applications, the CAD software represents this platform. Instance models can be developed and applied for a variety of harvesting activities, such as Siemens Jack (<xref ref-type="bibr" rid="B76">Siemens Industry Software, 2011</xref>) for ergonomic analysis, AnyLogic (<xref ref-type="bibr" rid="B81">The AnyLogic Company, n. d.</xref>) for process flows, and Gazebo (<xref ref-type="bibr" rid="B60">Open Source Robotics Foundation, Inc., n. d.</xref>) for robotic harvesting strategies.</p>
<p>Preliminary research in the mushroom industry has been selected as a case study to highlight the CAD/CAM framework and is demonstrated in <xref ref-type="table" rid="T6">Table 6</xref>.</p>
<table-wrap id="T6" position="float">
<label>TABLE 6</label>
<caption>
<p>Application of the framework to preliminary research in the mushroom industry.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="left"/>
<th align="center">IoT layer</th>
<th align="center">Layer Description</th>
<th align="center">CAD (<xref ref-type="fig" rid="F10">Figure 10A-E</xref>)</th>
<th align="center">Application</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td rowspan="10" align="center">
<bold>Data</bold>
</td>
<td rowspan="9" align="left">Perception</td>
<td rowspan="9" align="left">Physical devices</td>
<td align="left">&#x2022; Images:</td>
<td rowspan="9" align="left">Protected environment: mushroom farm</td>
</tr>
<tr>
<td align="left">o Mushrooms</td>
</tr>
<tr>
<td align="left">o Infrastructure</td>
</tr>
<tr>
<td align="left">o Static posture</td>
</tr>
<tr>
<td align="left">o Markers for motion analyses (<xref ref-type="fig" rid="F10">Figure 10A</xref>)</td>
</tr>
<tr>
<td align="left">&#x2022; Videos: o Harvesting, trimming, and placement (REB&#x23;22&#x2013;205)</td>
</tr>
<tr>
<td align="left">&#x2022; Physical measurements: <monospace>o</monospace> Specialty glove for motion (<xref ref-type="fig" rid="F10">Figure 10B</xref>) and force over time data</td>
</tr>
<tr>
<td align="left">o Mushroom size, shape, mass, age</td>
</tr>
<tr>
<td align="left">o Mushroom bed size, shape</td>
</tr>
<tr>
<td align="left">Network</td>
<td align="left">Wired or wireless communications</td>
<td align="left">N/A</td>
<td align="left">Mushroom classification:</td>
</tr>
<tr>
<td rowspan="6" align="center">
<bold>Knowledge</bold>
</td>
<td rowspan="5" align="left">Middleware</td>
<td rowspan="5" align="left">Data aggregation</td>
<td align="left">&#x2022; Mushroom CAD models<xref ref-type="table-fn" rid="Tfn1">
<sup>a</sup>
</xref> (<xref ref-type="fig" rid="F10">Figure 10C</xref>)</td>
<td align="left">&#x2022; Shape&#x2013;<bold>(RO)</bold>
</td>
</tr>
<tr>
<td align="left">&#x2022; Frame CAD models</td>
<td align="left">&#x2022; Growth location&#x2013;<bold>(G)</bold>
</td>
</tr>
<tr>
<td align="left">&#x2022; Anthropometric models of the human harvesters</td>
<td align="left">&#x2022; Growth pattern&#x2013;<bold>(C)</bold>
</td>
</tr>
<tr>
<td align="left">&#x2022; Time, force, and motion graphs</td>
<td align="left">&#x2022; Harvest contact points&#x2013;<bold>(BA)</bold>
</td>
</tr>
<tr>
<td align="left">&#x2022; Maximum picking force and bruising force data</td>
<td align="left">&#x2022; Harvest method&#x2013;<bold>(H)</bold>
</td>
</tr>
<tr>
<td align="left">Common platform</td>
<td align="left">Data storage and analytics</td>
<td align="left">&#x2022; Siemens NX and Jack</td>
<td align="left">&#x2022; Stem mechanics&#x2013;<bold>(B)</bold>
</td>
</tr>
<tr>
<td rowspan="4" align="center">
<bold>Design space</bold>
</td>
<td rowspan="4" align="left">Application</td>
<td rowspan="4" align="left">Management platforms and systems</td>
<td align="left">&#x2022; Specialty gripper design<xref ref-type="table-fn" rid="Tfn1">
<sup>b</sup>
</xref>, test, and simulation activities (<xref ref-type="fig" rid="F10">Figure 10D</xref>)</td>
<td align="left"/>
</tr>
<tr>
<td align="left">&#x2022; Ergonomics assessment (<xref ref-type="fig" rid="F10">Figure 10E</xref>)</td>
<td align="left"/>
</tr>
<tr>
<td align="left">&#x2022; &#x2018;What if&#x2019; studies for bed redesign</td>
<td align="left"/>
</tr>
<tr>
<td align="left">&#x2022; Feasibility assessment for robotic harvesting</td>
<td align="left"/>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn id="Tfn1">
<label>
<sup>a</sup>
</label>
<p>Future work &#x2192; Voronoi representation templates for mushroom models.</p>
</fn>
<fn id="Tfn2">
<label>
<sup>b</sup>
</label>
<p>Future work &#x2192; embedded sensors.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<p>Images, videos, and physical measurements were collected at the per-produce level in a commercial farm environment (<xref ref-type="fig" rid="F10">Figures 10A, B</xref>). From this data, CAD models of mushrooms (<xref ref-type="fig" rid="F10">Figure 10C</xref>) and their growth environment (frames) were developed in Siemens NX software (<xref ref-type="bibr" rid="B103">NX software Siemens Software, n.d.</xref>). Harvesting time, force, and motion graphs were produced, and maximum picking and bruising forces were established. Specialty grippers for mushroom harvesting (<xref ref-type="fig" rid="F10">Figure 10D</xref>) were designed, simulated, and tested. These were also used to perform feasibility assessments for robotic harvesting. Ergonomic assessments and &#x2018;what if&#x2019; studies were conducted for various harvesting scenarios using Siemens Jack software with anthropometric human harvester models (<xref ref-type="fig" rid="F10">Figure 10E</xref>). This conceptual model featured mushroom objects that are uniform in size, shape, location, and orientation. In the real environment, mushrooms have varying geometric and physical parameters, and growth is clustered. Although useful for analyzing ergonomics in select static harvesting situations, this model is not a true representation of a growing mushroom environment.</p>
<fig id="F10" position="float">
<label>FIGURE 10</label>
<caption>
<p>
<bold>(A)</bold> markers for mushroom picking motion analysis <bold>(B)</bold> force measurement glove <bold>(C)</bold> CAD model of mushroom <bold>(D)</bold> CAD models of specialty gripper design <bold>(E)</bold> Siemens Jack anthropometric model for ergonomic analysis.</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g010.tif"/>
</fig>
<p>Future research should focus on establishing model instances throughout a harvest cycle with standardized representations based on real-time data collected by IoT devices. An ANN approach, where image data is collected along with representative geometry (Euclidean or Non-Euclidean) input data (<xref ref-type="fig" rid="F11">Figure 11A</xref>) can be used to develop accurate CAD models (SDF or voxel based) from baseline instances of different shapes. This would facilitate adaptable end-effector designs. At the systems level, aggregate modeling that considers discrete events in tandem with agent-based simulations (<xref ref-type="bibr" rid="B81">The AnyLogic Company, n. d.</xref>) can be developed using more realistic scenarios for crop management (<xref ref-type="fig" rid="F11">Figure 11B</xref>) to detailed harvesting activities such as robotic trajectories.</p>
<fig id="F11" position="float">
<label>FIGURE 11</label>
<caption>
<p>
<bold>(A)</bold> ANN structure for training and validation for produce and plant features <bold>(B)</bold> AnyLogic model of mushroom harvesting at the systems level.</p>
</caption>
<graphic xlink:href="fmtec-03-1282843-g011.tif"/>
</fig>
<p>Responses to compression forces (deflection, bruising, etc.) (<xref ref-type="bibr" rid="B64">Recchia et al., 2023</xref>) and other mechanical and physical properties need to be determined experimentally to have a baseline for effective downstream modeling simulations, whether machining or finite element based. As the knowledge base develops for a harvest type, design improvements throughout the growth cycle for the automation can be implemented.</p>
</sec>
<sec sec-type="conclusion" id="s8">
<title>8 Conclusion</title>
<p>In order to ensure that food production will meet the demand of a rapidly growing global population, agricultural harvesting systems need to transform their existing practices. I4.0 technologies are driving more efficient use of inputs, remote environmental control in greenhouses, automated harvesting, and enhanced farm management practices. Several solutions that utilized these technologies, including Internet of Things, machine learning, deep learning and advanced robotics, were highlighted throughout this review. CAD and CAM tools supported the development of 3D crop environment models to simulate, test, and validate harvesting strategies, trajectory plans, and grasping poses. CAD tools also facilitated the design of robotic end-of-arm tooling, rigid-body, and other auxiliary components. Very few solutions have achieved a commercial product state. Complexities within crop environments, data sufficiency, and memory and computational demands are barriers to their successful operation in actual farming systems. CAD models that represent in-process crop states throughout a harvesting cycle should be explored. Integrating this model type with I4.0 technologies can promote data-driven harvesting practices to improve system performance.</p>
</sec>
</body>
<back>
<sec id="s9">
<title>Author contributions</title>
<p>AR: Writing&#x2013;original draft, Writing&#x2013;review and editing. JU: Supervision, Writing&#x2013;original draft, Writing&#x2013;review and editing.</p>
</sec>
<sec id="s10">
<title>Funding</title>
<p>The author(s) declare that no financial support was received for the research, authorship, and/or publication of this article.</p>
</sec>
<ack>
<p>The authors would like to acknowledge the fourth year Industrial Engineering capstone team at the University of Windsor for their work in developing an ergonomic simulation for mushroom harvesting.</p>
</ack>
<sec sec-type="COI-statement" id="s11">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="s12">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<collab>Advanced Farms Technologies</collab>. <article-title>Apple harvester</article-title>. <source>Adv. Farm</source>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://advanced.farm/technology/apple-harvester/">https://advanced.farm/technology/apple-harvester/</ext-link> (Accessed August 5, 2023)</comment>.</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<collab>Advanced Farms Technologies</collab>. <article-title>Strawberry harvester</article-title>. <source>Adv. Farm</source>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://advanced.farm/technology/strawberry-harvester/">https://advanced.farm/technology/strawberry-harvester/</ext-link> (Accessed August 5, 2023)</comment>.</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Agrobot</surname>
</name>
</person-group>. <article-title>Robotic harvesters &#x7c; agrobot</article-title>. <source>Agrobot. Agric. Robots</source>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://www.agrobot.com/e-series">https://www.agrobot.com/e-series</ext-link> (Accessed August 5, 2023)</comment>.</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Arad</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Balendonck</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Barth</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Ben-Shahar</surname>
<given-names>O.</given-names>
</name>
<name>
<surname>Edan</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Hellstr&#xf6;m</surname>
<given-names>T.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Development of a sweet pepper harvesting robot</article-title>. <source>J. Field Robotics</source> <volume>37</volume>, <fpage>1027</fpage>&#x2013;<lpage>1039</lpage>. <pub-id pub-id-type="doi">10.1002/rob.21937</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bac</surname>
<given-names>C. W.</given-names>
</name>
<name>
<surname>van Henten</surname>
<given-names>E. J.</given-names>
</name>
<name>
<surname>Hemming</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Edan</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Harvesting robots for high&#x2010;value crops: state&#x2010;of&#x2010;the&#x2010;art review and challenges ahead</article-title>. <source>J. Field Robot.</source> <volume>31</volume>, <fpage>888</fpage>&#x2013;<lpage>911</lpage>. <pub-id pub-id-type="doi">10.1002/rob.21525</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Badeka</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Kalampokas</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Vrochidou</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Tziridis</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Papakostas</surname>
<given-names>G. A.</given-names>
</name>
<name>
<surname>Pachidis</surname>
<given-names>T. P.</given-names>
</name>
<etal/>
</person-group> (<year>2021</year>). <article-title>Vision-based vineyard trunk detection and its integration into a grapes harvesting robot</article-title>. <source>IJMERR</source>, <fpage>374</fpage>&#x2013;<lpage>385</lpage>. <pub-id pub-id-type="doi">10.18178/ijmerr.10.7.374-385</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bazame</surname>
<given-names>H. C.</given-names>
</name>
<name>
<surname>Molin</surname>
<given-names>J. P.</given-names>
</name>
<name>
<surname>Althoff</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Martello</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Detection, classification, and mapping of coffee fruits during harvest with computer vision</article-title>. <source>Comput. Electron. Agric.</source> <volume>183</volume>, <fpage>106066</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2021.106066</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Benavides</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Cant&#xf3;n-Garb&#xed;n</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>S&#xe1;nchez-Molina</surname>
<given-names>J. A.</given-names>
</name>
<name>
<surname>Rodr&#xed;guez</surname>
<given-names>F.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Automatic tomato and peduncle location system based on computer vision for use in robotized harvesting</article-title>. <source>Appl. Sci.</source> <volume>10</volume>, <fpage>5887</fpage>. <pub-id pub-id-type="doi">10.3390/app10175887</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Birrell</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Hughes</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Cai</surname>
<given-names>J. Y.</given-names>
</name>
<name>
<surname>Iida</surname>
<given-names>F.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>A field-tested robotic harvesting system for iceberg lettuce</article-title>. <source>J. Field Robotics</source> <volume>37</volume>, <fpage>225</fpage>&#x2013;<lpage>245</lpage>. <pub-id pub-id-type="doi">10.1002/rob.21888</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Brown</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Sukkarieh</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Design and evaluation of a modular robotic plum harvesting system utilizing soft components</article-title>. <source>J. Field Robotics</source> <volume>38</volume>, <fpage>289</fpage>&#x2013;<lpage>306</lpage>. <pub-id pub-id-type="doi">10.1002/rob.21987</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="web">
<person-group person-group-type="author">
<name>
<surname>Canada</surname>
<given-names>E.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Taking stock: reducing food loss and waste in Canada</article-title>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://www.canada.ca/en/environment-climate-change/services/managing-reducing-waste/food-loss-waste/taking-stock.html">https://www.canada.ca/en/environment-climate-change/services/managing-reducing-waste/food-loss-waste/taking-stock.html</ext-link> (Accessed July 29, 2023)</comment>.</citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chang</surname>
<given-names>C.-L.</given-names>
</name>
<name>
<surname>Chung</surname>
<given-names>S.-C.</given-names>
</name>
<name>
<surname>Fu</surname>
<given-names>W.-L.</given-names>
</name>
<name>
<surname>Huang</surname>
<given-names>C.-C.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Artificial intelligence approaches to predict growth, harvest day, and quality of lettuce (Lactuca sativa L.) in a IoT-enabled greenhouse system</article-title>. <source>Biosyst. Eng.</source> <volume>212</volume>, <fpage>77</fpage>&#x2013;<lpage>105</lpage>. <pub-id pub-id-type="doi">10.1016/j.biosystemseng.2021.09.015</pub-id>
</citation>
</ref>
<ref id="B13">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Charles</surname>
<given-names>R. Q.</given-names>
</name>
<name>
<surname>Su</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Kaichun</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Guibas</surname>
<given-names>L. J.</given-names>
</name>
</person-group> (<year>2017</year>). &#x201c;<article-title>PointNet: deep learning on point sets for 3D classification and segmentation</article-title>,&#x201d; in <conf-name>IEEE Conference on Computer Vision and Pattern Recognition</conf-name>, <conf-loc>Honolulu, HI, USA</conf-loc>, <conf-date>21-26 July 2017</conf-date> (<publisher-name>CVPR</publisher-name>), <fpage>77</fpage>. <comment>&#x2013;85</comment>. <pub-id pub-id-type="doi">10.1109/CVPR.2017.16</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chen</surname>
<given-names>L.-B.</given-names>
</name>
<name>
<surname>Huang</surname>
<given-names>X.-R.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>W.-H.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Design and implementation of an artificial intelligence of things-based autonomous mobile robot system for pitaya harvesting</article-title>. <source>IEEE Sensors J.</source> <volume>23</volume>, <fpage>13220</fpage>&#x2013;<lpage>13235</lpage>. <pub-id pub-id-type="doi">10.1109/JSEN.2023.3270844</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Christiansen</surname>
<given-names>M. P.</given-names>
</name>
<name>
<surname>Laursen</surname>
<given-names>M. S.</given-names>
</name>
<name>
<surname>J&#xf8;rgensen</surname>
<given-names>R. N.</given-names>
</name>
<name>
<surname>Skovsen</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Gislum</surname>
<given-names>R.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Designing and testing a UAV mapping system for agricultural field surveying</article-title>. <source>Sensors</source> <volume>17</volume>, <fpage>2703</fpage>. <pub-id pub-id-type="doi">10.3390/s17122703</pub-id>
</citation>
</ref>
<ref id="B16">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Comba</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Biglia</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Aimonino</surname>
<given-names>D. R.</given-names>
</name>
<name>
<surname>Barge</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Tortia</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Gay</surname>
<given-names>P.</given-names>
</name>
</person-group> (<year>2019</year>). &#x201c;<article-title>2D and 3D data fusion for crop monitoring in precision agriculture</article-title>,&#x201d; in <source>IEEE international workshop on metrology for agriculture and forestry</source> (<publisher-loc>China</publisher-loc>: <publisher-name>MetroAgriFor</publisher-name>), <fpage>62</fpage>. <comment>&#x2013;67</comment>. <pub-id pub-id-type="doi">10.1109/MetroAgriFor.2019.8909219</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="web">
<collab>Croptracker</collab> (<year>2023</year>). <article-title>Croptracker - farm management software</article-title>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://www.croptracker.com/product/farm-management-software.html">https://www.croptracker.com/product/farm-management-software.html</ext-link> (Accessed August 1, 2023)</comment>.</citation>
</ref>
<ref id="B19">
<citation citation-type="web">
<collab>CropX inc</collab> (<year>2022</year>). <article-title>CropX agronomic farm management system</article-title>. <source>CropX</source>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://cropx.com/cropx-system/">https://cropx.com/cropx-system/</ext-link> (Accessed August 1, 2023)</comment>.</citation>
</ref>
<ref id="B20">
<citation citation-type="web">
<person-group person-group-type="author">
<name>
<surname>De Clercq</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Vats</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Biel</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Agriculture 4.0 &#x2013; the future of farming technology</article-title>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://www.oliverwyman.com/our-expertise/insights/2018/feb/agriculture-4-0--the-future-of-farming-technology.html">https://www.oliverwyman.com/our-expertise/insights/2018/feb/agriculture-4-0--the-future-of-farming-technology.html</ext-link> (Accessed March 28, 2023)</comment>.</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Elijah</surname>
<given-names>O.</given-names>
</name>
<name>
<surname>Rahman</surname>
<given-names>T. A.</given-names>
</name>
<name>
<surname>Orikumhi</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Leow</surname>
<given-names>C. Y.</given-names>
</name>
<name>
<surname>Hindia</surname>
<given-names>M. N.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>An overview of internet of things (IoT) and data analytics in agriculture: benefits and challenges</article-title>. <source>IEEE Internet Things J.</source> <volume>5</volume>, <fpage>3758</fpage>&#x2013;<lpage>3773</lpage>. <pub-id pub-id-type="doi">10.1109/JIOT.2018.2844296</pub-id>
</citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Faisal</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Alsulaiman</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Arafah</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Mekhtiche</surname>
<given-names>M. A.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>IHDS: intelligent harvesting decision system for date fruit based on maturity stage using deep learning and computer vision</article-title>. <source>IEEE Access</source> <volume>8</volume>, <fpage>167985</fpage>&#x2013;<lpage>167997</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2020.3023894</pub-id>
</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fan</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Yan</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Lei</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>F.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Three-finger grasp planning and experimental analysis of picking patterns for robotic apple harvesting</article-title>. <source>Comput. Electron. Agric.</source> <volume>188</volume>, <fpage>106353</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2021.106353</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Frisken</surname>
<given-names>S. F.</given-names>
</name>
<name>
<surname>Perry</surname>
<given-names>R. N.</given-names>
</name>
</person-group> (<year>2006</year>). <article-title>Designing with distance fields</article-title>. <source>Mitsubishi Electr. Res. Laboratories</source>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://www.merl.com/publications/docs/TR2006-054.pdf">https://www.merl.com/publications/docs/TR2006-054.pdf</ext-link> (Accessed August 13, 2023)</comment>.</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Goh</surname>
<given-names>B.-B.</given-names>
</name>
<name>
<surname>King</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Whetton</surname>
<given-names>R. L.</given-names>
</name>
<name>
<surname>Sattari</surname>
<given-names>S. Z.</given-names>
</name>
<name>
<surname>Holden</surname>
<given-names>N. M.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Monitoring winter wheat growth performance at sub-field scale using multitemporal Sentinel-2 imagery</article-title>. <source>Int. J. Appl. Earth Observations Geoinformation</source> <volume>115</volume>, <fpage>103124</fpage>. <pub-id pub-id-type="doi">10.1016/j.jag.2022.103124</pub-id>
</citation>
</ref>
<ref id="B26">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Guo</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Hu</surname>
<given-names>Q.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Bennamoun</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Deep learning for 3D point clouds: a survey</article-title>. <source>IEEE Trans. Pattern Anal. Mach. Intell.</source> <volume>43</volume>, <fpage>4338</fpage>&#x2013;<lpage>4364</lpage>. <pub-id pub-id-type="doi">10.1109/TPAMI.2020.3005434</pub-id>
</citation>
</ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hassanzadeh</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Murphy</surname>
<given-names>S. P.</given-names>
</name>
<name>
<surname>Pethybridge</surname>
<given-names>S. J.</given-names>
</name>
<name>
<surname>Van Aardt</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Growth stage classification and harvest scheduling of snap bean using hyperspectral sensing: a greenhouse study</article-title>. <source>Remote Sens.</source> <volume>12</volume>, <fpage>3809</fpage>. <pub-id pub-id-type="doi">10.3390/rs12223809</pub-id>
</citation>
</ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hespeler</surname>
<given-names>S. C.</given-names>
</name>
<name>
<surname>Nemati</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Dehghan-Niri</surname>
<given-names>E.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Non-destructive thermal imaging for object detection via advanced deep learning for robotic inspection and harvesting of chili peppers</article-title>. <source>Artif. Intell. Agric.</source> <volume>5</volume>, <fpage>102</fpage>&#x2013;<lpage>117</lpage>. <pub-id pub-id-type="doi">10.1016/j.aiia.2021.05.003</pub-id>
</citation>
</ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hisada</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Belyaev</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Kunii</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2001</year>). <article-title>A 3D voronoi-based skeleton and associated surface features</article-title>. <fpage>89</fpage>&#x2013;<lpage>96</lpage>. <pub-id pub-id-type="doi">10.1109/PCCGA.2001.962861</pub-id>
</citation>
</ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hornung</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Wurm</surname>
<given-names>K. M.</given-names>
</name>
<name>
<surname>Bennewitz</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Stachniss</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Burgard</surname>
<given-names>W.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>OctoMap: an efficient probabilistic 3D mapping framework based on octrees</article-title>. <source>Auton. Robot.</source> <volume>34</volume>, <fpage>189</fpage>&#x2013;<lpage>206</lpage>. <pub-id pub-id-type="doi">10.1007/s10514-012-9321-0</pub-id>
</citation>
</ref>
<ref id="B100">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hu</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Mutlu</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Alici</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>A structural optimisation method for a soft pneumatic actuator</article-title>. <source>Robotics</source> <volume>7</volume>, <fpage>24</fpage>. <pub-id pub-id-type="doi">10.3390/robotics7020024</pub-id>
</citation>
</ref>
<ref id="B32">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Iqbal</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Xu</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Sun</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Simulation of an autonomous mobile robot for LiDAR-based in-field phenotyping and navigation</article-title>. <source>Robotics</source> <volume>9</volume>, <fpage>46</fpage>. <pub-id pub-id-type="doi">10.3390/robotics9020046</pub-id>
</citation>
</ref>
<ref id="B33">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Javaid</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Haleem</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Singh</surname>
<given-names>R. P.</given-names>
</name>
<name>
<surname>Suman</surname>
<given-names>R.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Enhancing smart farming through the applications of Agriculture 4.0 technologies</article-title>. <source>Int. J. Intelligent Netw.</source> <volume>3</volume>, <fpage>150</fpage>&#x2013;<lpage>164</lpage>. <pub-id pub-id-type="doi">10.1016/j.ijin.2022.09.004</pub-id>
</citation>
</ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jia</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Shao</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Hou</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Ji</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>G.</given-names>
</name>
<etal/>
</person-group> (<year>2021</year>). <article-title>FoveaMask: a fast and accurate deep learning model for green fruit instance segmentation</article-title>. <source>Comput. Electron. Agric.</source> <volume>191</volume>, <fpage>106488</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2021.106488</pub-id>
</citation>
</ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jones</surname>
<given-names>M. W.</given-names>
</name>
<name>
<surname>Baerentzen</surname>
<given-names>J. A.</given-names>
</name>
<name>
<surname>Sramek</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2006</year>). <article-title>3D distance fields: a survey of techniques and applications</article-title>. <source>IEEE Trans. Vis. Comput. Graph.</source> <volume>12</volume>, <fpage>581</fpage>&#x2013;<lpage>599</lpage>. <pub-id pub-id-type="doi">10.1109/TVCG.2006.56</pub-id>
</citation>
</ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jordan</surname>
<given-names>M. I.</given-names>
</name>
<name>
<surname>Mitchell</surname>
<given-names>T. M.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Machine learning: trends, perspectives, and prospects</article-title>. <source>Science</source> <volume>349</volume>, <fpage>255</fpage>&#x2013;<lpage>260</lpage>. <pub-id pub-id-type="doi">10.1126/science.aaa8415</pub-id>
</citation>
</ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kang</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Zhou</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2020a</year>). <article-title>Visual perception and modeling for autonomous apple harvesting</article-title>. <source>IEEE Access</source> <volume>8</volume>, <fpage>62151</fpage>&#x2013;<lpage>62163</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2020.2984556</pub-id>
</citation>
</ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kang</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Zhou</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2020b</year>). <article-title>Real-time fruit recognition and grasping estimation for robotic apple harvesting</article-title>. <source>Sensors</source> <volume>20</volume>, <fpage>5670</fpage>. <pub-id pub-id-type="doi">10.3390/s20195670</pub-id>
</citation>
</ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Khalifeh</surname>
<given-names>A. F.</given-names>
</name>
<name>
<surname>AlQammaz</surname>
<given-names>A. Y.</given-names>
</name>
<name>
<surname>Abualigah</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Khasawneh</surname>
<given-names>A. M.</given-names>
</name>
<name>
<surname>Darabkh</surname>
<given-names>K. A.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>A machine learning-based weather prediction model and its application on smart irrigation</article-title>. <source>J. Intelligent Fuzzy Syst.</source> <volume>43</volume>, <fpage>1835</fpage>&#x2013;<lpage>1842</lpage>. <pub-id pub-id-type="doi">10.3233/JIFS-219284</pub-id>
</citation>
</ref>
<ref id="B40">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Khanna</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>M&#xf6;ller</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Pfeifer</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Liebisch</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Walter</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Siegwart</surname>
<given-names>R.</given-names>
</name>
</person-group> (<year>2015</year>). <source>Beyond point clouds - 3D mapping and field parameter measurements using UAVs</source>.</citation>
</ref>
<ref id="B41">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kierdorf</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Junker-Frohn</surname>
<given-names>L. V.</given-names>
</name>
<name>
<surname>Delaney</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Olave</surname>
<given-names>M. D.</given-names>
</name>
<name>
<surname>Burkart</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Jaenicke</surname>
<given-names>H.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>GrowliFlower: an image time-series dataset for GROWth analysis of cauLIFLOWER</article-title>. <source>J. Field Robotics</source> <volume>40</volume>, <fpage>173</fpage>&#x2013;<lpage>192</lpage>. <pub-id pub-id-type="doi">10.1002/rob.22122</pub-id>
</citation>
</ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kim</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Hong</surname>
<given-names>S.-J.</given-names>
</name>
<name>
<surname>Ryu</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Kim</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Lee</surname>
<given-names>C.-H.</given-names>
</name>
<name>
<surname>Kim</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Application of amodal segmentation on cucumber segmentation and occlusion recovery</article-title>. <source>Comput. Electron. Agric.</source> <volume>210</volume>, <fpage>107847</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2023.107847</pub-id>
</citation>
</ref>
<ref id="B43">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kocian</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Massa</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Cannazzaro</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Incrocci</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Di Lonardo</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Milazzo</surname>
<given-names>P.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Dynamic Bayesian network for crop growth prediction in greenhouses</article-title>. <source>Comput. Electron. Agric.</source> <volume>169</volume>, <fpage>105167</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2019.105167</pub-id>
</citation>
</ref>
<ref id="B44">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kokab</surname>
<given-names>H. S.</given-names>
</name>
<name>
<surname>Urbanic</surname>
<given-names>R. J.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Extracting of cross section profiles from complex point cloud data sets</article-title>. <source>IFAC-PapersOnLine</source> <volume>52</volume>, <fpage>346</fpage>&#x2013;<lpage>351</lpage>. <pub-id pub-id-type="doi">10.1016/j.ifacol.2019.10.055</pub-id>
</citation>
</ref>
<ref id="B45">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Langer</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Gabdulkhakova</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Kropatsch</surname>
<given-names>W. G.</given-names>
</name>
</person-group> (<year>2019</year>). &#x201c;<article-title>Non-centered Voronoi skeletons</article-title>,&#x201d; in <source>Discrete geometry for computer imagery lecture notes in computer science</source>. Editors <person-group person-group-type="editor">
<name>
<surname>Couprie</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Cousty</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Kenmochi</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Mustafa</surname>
<given-names>N.</given-names>
</name>
</person-group> (<publisher-loc>Cham</publisher-loc>: <publisher-name>Springer International Publishing</publisher-name>), <fpage>355</fpage>&#x2013;<lpage>366</lpage>. <pub-id pub-id-type="doi">10.1007/978-3-030-14085-4_28</pub-id>
</citation>
</ref>
<ref id="B46">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lehnert</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>McCool</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Sa</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Perez</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Performance improvements of a sweet pepper harvesting robot in protected cropping environments</article-title>. <source>J. Field Robotics</source> <volume>37</volume>, <fpage>1197</fpage>&#x2013;<lpage>1223</lpage>. <pub-id pub-id-type="doi">10.1002/rob.21973</pub-id>
</citation>
</ref>
<ref id="B47">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Nouaze</surname>
<given-names>J. C.</given-names>
</name>
<name>
<surname>Touko Mbouembe</surname>
<given-names>P. L.</given-names>
</name>
<name>
<surname>Kim</surname>
<given-names>J. H.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>YOLO-tomato: a robust algorithm for tomato detection based on YOLOv3</article-title>. <source>Sensors</source> <volume>20</volume>, <fpage>2145</fpage>. <pub-id pub-id-type="doi">10.3390/s20072145</pub-id>
</citation>
</ref>
<ref id="B48">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Ma</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Shu</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Hancke</surname>
<given-names>G. P.</given-names>
</name>
<name>
<surname>Abu-Mahfouz</surname>
<given-names>A. M.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>From industry 4.0 to agriculture 4.0: current status, enabling technologies, and research challenges</article-title>. <source>IEEE Trans. Industrial Inf.</source> <volume>17</volume>, <fpage>4322</fpage>&#x2013;<lpage>4334</lpage>. <pub-id pub-id-type="doi">10.1109/TII.2020.3003910</pub-id>
</citation>
</ref>
<ref id="B49">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Maponya</surname>
<given-names>M. G.</given-names>
</name>
<name>
<surname>van Niekerk</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Mashimbye</surname>
<given-names>Z. E.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Pre-harvest classification of crop types using a Sentinel-2 time-series and machine learning</article-title>. <source>Comput. Electron. Agric.</source> <volume>169</volume>, <fpage>105164</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2019.105164</pub-id>
</citation>
</ref>
<ref id="B50">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Marangoz</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Zaenker</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Menon</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Bennewitz</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2022</year>). &#x201c;<article-title>Fruit mapping with shape completion for autonomous crop monitoring</article-title>,&#x201d; in <conf-name>IEEE 18th International Conference on Automation Science and Engineering (CASE)</conf-name>, <conf-loc>USA</conf-loc>, <conf-date>20-24 Aug. 2022</conf-date> (<publisher-name>IEEE</publisher-name>), <fpage>471</fpage>&#x2013;<lpage>476</lpage>. <pub-id pub-id-type="doi">10.1109/CASE49997.2022.9926466</pub-id>
</citation>
</ref>
<ref id="B51">
<citation citation-type="web">
<collab>McKinsey and Company</collab> (<year>2022</year>). <article-title>What is IoT: the internet of things explained &#x7c; McKinsey</article-title>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://www.mckinsey.com/featured-insights/mckinsey-explainers/what-is-the-internet-of-things">https://www.mckinsey.com/featured-insights/mckinsey-explainers/what-is-the-internet-of-things</ext-link> (Accessed August 13, 2023)</comment>.</citation>
</ref>
<ref id="B52">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mekala</surname>
<given-names>M. S.</given-names>
</name>
<name>
<surname>Viswanathan</surname>
<given-names>P.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>CLAY-MIST: IoT-cloud enabled CMM index for smart agriculture monitoring system</article-title>. <source>Measurement</source> <volume>134</volume>, <fpage>236</fpage>&#x2013;<lpage>244</lpage>. <pub-id pub-id-type="doi">10.1016/j.measurement.2018.10.072</pub-id>
</citation>
</ref>
<ref id="B53">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Miao</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Yu</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>He</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>Z.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>Efficient tomato harvesting robot based on image processing and deep learning</article-title>. <source>Precis. Agric.</source> <volume>24</volume>, <fpage>254</fpage>&#x2013;<lpage>287</lpage>. <pub-id pub-id-type="doi">10.1007/s11119-022-09944-w</pub-id>
</citation>
</ref>
<ref id="B54">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Miragaia</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Ch&#xe1;vez</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>D&#xed;az</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Vivas</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Prieto</surname>
<given-names>M. H.</given-names>
</name>
<name>
<surname>Mo&#xf1;ino</surname>
<given-names>M. J.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Plum ripeness analysis in real environments using deep learning with convolutional neural networks</article-title>. <source>Agronomy</source> <volume>11</volume>, <fpage>2353</fpage>. <pub-id pub-id-type="doi">10.3390/agronomy11112353</pub-id>
</citation>
</ref>
<ref id="B55">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mu</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Cui</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Cui</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Fu</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Gejima</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Design and simulation of an integrated end-effector for picking kiwifruit by robot</article-title>. <source>Inf. Process. Agric.</source> <volume>7</volume>, <fpage>58</fpage>&#x2013;<lpage>71</lpage>. <pub-id pub-id-type="doi">10.1016/j.inpa.2019.05.004</pub-id>
</citation>
</ref>
<ref id="B56">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>N&#xe4;f</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Sz&#xe9;kely</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Kikinis</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Shenton</surname>
<given-names>M. E.</given-names>
</name>
<name>
<surname>K&#xfc;bler</surname>
<given-names>O.</given-names>
</name>
</person-group> (<year>1997</year>). <article-title>3D Voronoi skeletons and their usage for the characterization and recognition of 3D organ shape</article-title>. <source>Comput. Vis. Image Underst.</source> <volume>66</volume>, <fpage>147</fpage>&#x2013;<lpage>161</lpage>. <pub-id pub-id-type="doi">10.1006/cviu.1997.0610</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="web">
<collab>NVIDIA Corporation</collab> (<year>2020</year>). <article-title>Kaolin suite of tools</article-title>. <source>NVIDIA Dev</source>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://developer.nvidia.com/kaolin">https://developer.nvidia.com/kaolin</ext-link> (Accessed August 13, 2023)</comment>.</citation>
</ref>
<ref id="B103">
<citation citation-type="web">
<collab>NX software Siemens Software</collab>. <source>Siemens digital industries software</source>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://plm.sw.siemens.com/en-US/nx/">https://plm.sw.siemens.com/en-US/nx/</ext-link> (Accessed August 23, 2023)</comment>.</citation>
</ref>
<ref id="B57">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Odhner</surname>
<given-names>L. U.</given-names>
</name>
<name>
<surname>Jentoft</surname>
<given-names>L. P.</given-names>
</name>
<name>
<surname>Claffee</surname>
<given-names>M. R.</given-names>
</name>
<name>
<surname>Corson</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Tenzer</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Ma</surname>
<given-names>R. R.</given-names>
</name>
<etal/>
</person-group> (<year>2014</year>). <article-title>A compliant, underactuated hand for robust manipulation</article-title>. <source>Int. J. Robotics Res.</source> <volume>33</volume>, <fpage>736</fpage>&#x2013;<lpage>752</lpage>. <pub-id pub-id-type="doi">10.1177/0278364913514466</pub-id>
</citation>
</ref>
<ref id="B58">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Ogniewicz</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Ilg</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>1992</year>). &#x201c;<article-title>Voronoi skeletons: theory and applications</article-title>,&#x201d; in <source>Proceedings 1992 IEEE computer society conference on computer</source> (<publisher-loc>USA</publisher-loc>: <publisher-name>Vision and Pattern Recognition</publisher-name>), <fpage>63</fpage>&#x2013;<lpage>69</lpage>. <pub-id pub-id-type="doi">10.1109/CVPR.1992.223226</pub-id>
</citation>
</ref>
<ref id="B59">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Oleynikova</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Taylor</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Fehr</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Siegwart</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Nieto</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Voxblox: incremental 3D euclidean signed distance fields for on-board MAV planning</article-title>. <source>IEEE/RSJ Int. Conf. Intelligent Robots Syst. (IROS)</source>, <fpage>1366</fpage>. <pub-id pub-id-type="doi">10.1109/IROS.2017.8202315</pub-id>
</citation>
</ref>
<ref id="B60">
<citation citation-type="web">
<collab>Open Source Robotics Foundation</collab> <article-title>Gazebo</article-title>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://gazebosim.org/home">https://gazebosim.org/home</ext-link> (Accessed August 17, 2023)</comment>.</citation>
</ref>
<ref id="B31">
<citation citation-type="web">
<collab>Panasonic</collab> (<year>2018</year>). <article-title>Introducing AI-equipped tomato harvesting robots to farms may help to create Jobs &#x7c; business solutions &#x7c; products &#x0026; solutions &#x7c; feature story</article-title>. <source>Panasonic Newsroom Global.</source> <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://news.panasonic.com/global/stories/814">https://news.panasonic.com/global/stories/814</ext-link> (Accessed August 5, 2023)</comment>.</citation>
</ref>
<ref id="B61">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Paunov</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Satorra</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>How are digital technologies changing innovation?</article-title> <pub-id pub-id-type="doi">10.1787/67bbcafe-en</pub-id>
</citation>
</ref>
<ref id="B62">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Peelar</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Urbanic</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Hedrick</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Rueda</surname>
<given-names>L.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Real-time visualization of bead based additive manufacturing toolpaths using implicit boundary representations</article-title>. <source>CAD&#x26;A</source> <volume>16</volume>, <fpage>904</fpage>&#x2013;<lpage>922</lpage>. <pub-id pub-id-type="doi">10.14733/cadaps.2019.904-922</pub-id>
</citation>
</ref>
<ref id="B63">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Praveen Kumar</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Amgoth</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Annavarapu</surname>
<given-names>C. S. R.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Machine learning algorithms for wireless sensor networks: a survey</article-title>. <source>Inf. Fusion</source> <volume>49</volume>, <fpage>1</fpage>&#x2013;<lpage>25</lpage>. <pub-id pub-id-type="doi">10.1016/j.inffus.2018.09.013</pub-id>
</citation>
</ref>
<ref id="B64">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Recchia</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Strelkova</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Urbanic</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Kim</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Anwar</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Murugan</surname>
<given-names>A. S.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>A prototype pick and place solution for harvesting white button mushrooms using a collaborative robot</article-title>. <source>Robot. Rep.</source> <volume>1 (1)</volume>, <fpage>67</fpage>&#x2013;<lpage>81</lpage>. <pub-id pub-id-type="doi">10.1089/rorep.2023.0016</pub-id>
</citation>
</ref>
<ref id="B65">
<citation citation-type="web">
<collab>Ridder and MetoMotion</collab>. <article-title>GRoW tomato harvesting robot &#x7c; for picking, collecting and boxing your tomatoes</article-title>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://grow.ridder.com">https://grow.ridder.com</ext-link> (Accessed August 5, 2023)</comment>.</citation>
</ref>
<ref id="B66">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rodr&#xed;guez</surname>
<given-names>J. P.</given-names>
</name>
<name>
<surname>Montoya-Munoz</surname>
<given-names>A. I.</given-names>
</name>
<name>
<surname>Rodriguez-Pabon</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Hoyos</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Corrales</surname>
<given-names>J. C.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>IoT-Agro: a smart farming system to Colombian coffee farms</article-title>. <source>Comput. Electron. Agric.</source> <volume>190</volume>, <fpage>106442</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2021.106442</pub-id>
</citation>
</ref>
<ref id="B67">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rodr&#xed;guez-Robles</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Martin</surname>
<given-names>&#xc1;.</given-names>
</name>
<name>
<surname>Martin</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Ruip&#xe9;rez-Valiente</surname>
<given-names>J. A.</given-names>
</name>
<name>
<surname>Castro</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Autonomous sensor network for rural agriculture environments, low cost, and energy self-charge</article-title>. <source>Sustainability</source> <volume>12</volume>, <fpage>5913</fpage>. <pub-id pub-id-type="doi">10.3390/su12155913</pub-id>
</citation>
</ref>
<ref id="B68">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rus</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Tolley</surname>
<given-names>M. T.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Design, fabrication and control of soft robots</article-title>. <source>Nature</source> <volume>521</volume>, <fpage>467</fpage>&#x2013;<lpage>475</lpage>. <pub-id pub-id-type="doi">10.1038/nature14543</pub-id>
</citation>
</ref>
<ref id="B69">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Ryan</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2023</year>). <source>Labour and skills shortages in the agro-food sector</source>. <publisher-loc>Paris</publisher-loc>: <publisher-name>OECD</publisher-name>. <pub-id pub-id-type="doi">10.1787/ed758aab-en</pub-id>
</citation>
</ref>
<ref id="B70">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sadowski</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Spachos</surname>
<given-names>P.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Wireless technologies for smart agricultural monitoring using internet of things devices with energy harvesting capabilities</article-title>. <source>Comput. Electron. Agric.</source> <volume>172</volume>, <fpage>105338</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2020.105338</pub-id>
</citation>
</ref>
<ref id="B71">
<citation citation-type="web">
<collab>Semios</collab> (<year>2023</year>). <article-title>Discover our crop management solutions</article-title>. <source>Semios</source>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://semios.com/solutions/">https://semios.com/solutions/</ext-link> (Accessed August 1, 2023)</comment>.</citation>
</ref>
<ref id="B72">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sep&#xfa;Lveda</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Fern&#xe1;ndez</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Navas</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Armada</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Gonz&#xe1;lez-De-Santos</surname>
<given-names>P.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Robotic aubergine harvesting using dual-arm manipulation</article-title>. <source>IEEE Access</source> <volume>8</volume>, <fpage>121889</fpage>&#x2013;<lpage>121904</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2020.3006919</pub-id>
</citation>
</ref>
<ref id="B73">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shafi</surname>
<given-names>U.</given-names>
</name>
<name>
<surname>Mumtaz</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Garc&#xed;a-Nieto</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Hassan</surname>
<given-names>S. A.</given-names>
</name>
<name>
<surname>Zaidi</surname>
<given-names>S. A. R.</given-names>
</name>
<name>
<surname>Iqbal</surname>
<given-names>N.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Precision agriculture techniques and practices: from considerations to applications</article-title>. <source>Sensors</source> <volume>19</volume>, <fpage>3796</fpage>. <pub-id pub-id-type="doi">10.3390/s19173796</pub-id>
</citation>
</ref>
<ref id="B74">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shi</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>An</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Zhao</surname>
<given-names>Q.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Xia</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Sun</surname>
<given-names>X.</given-names>
</name>
<etal/>
</person-group> (<year>2019</year>). <article-title>State-of-the-Art internet of things in protected agriculture</article-title>. <source>Sensors</source> <volume>19</volume>, <fpage>1833</fpage>. <pub-id pub-id-type="doi">10.3390/s19081833</pub-id>
</citation>
</ref>
<ref id="B75">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shintake</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Cacucciolo</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Floreano</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Shea</surname>
<given-names>H.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Soft robotic grippers</article-title>. <source>Adv. Mater.</source> <volume>30</volume>, <fpage>1707035</fpage>. <pub-id pub-id-type="doi">10.1002/adma.201707035</pub-id>
</citation>
</ref>
<ref id="B76">
<citation citation-type="web">
<collab>Siemens Industry Software</collab> (<year>2011</year>). <article-title>Siemens industry software</article-title>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://www.plm.automation.siemens.com/media/store/en_us/4917_tcm1023-4952_tcm29-1992.pdf">https://www.plm.automation.siemens.com/media/store/en_us/4917_tcm1023-4952_tcm29-1992.pdf</ext-link> (Accessed August 12, 2023)</comment>.</citation>
</ref>
<ref id="B101">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sishodia</surname>
<given-names>R. P.</given-names>
</name>
<name>
<surname>Ray</surname>
<given-names>R. L.</given-names>
</name>
<name>
<surname>Singh</surname>
<given-names>S. K.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Applications of remote sensing in precision agriculture: a review</article-title>. <source>Remote Sens.</source> <volume>12</volume>, <fpage>3136</fpage>. <pub-id pub-id-type="doi">10.3390/rs12193136</pub-id>
</citation>
</ref>
<ref id="B77">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Stepanova</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Pham</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Panthi</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Zourmand</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Christophe</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Laakkonen</surname>
<given-names>M.-P.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>Harvesting tomatoes with a Robot: an evaluation of Computer-Vision capabilities</article-title>. <source>IEEE Int. Conf. Aut. Robot Syst. Compet. (ICARSC)</source>, <fpage>63</fpage>&#x2013;<lpage>68</lpage>. <pub-id pub-id-type="doi">10.1109/ICARSC58346.2023.10129601</pub-id>
</citation>
</ref>
<ref id="B78">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ta&#x15f;an</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Cemek</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Ta&#x15f;an</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Cant&#xfc;rk</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Estimation of eggplant yield with machine learning methods using spectral vegetation indices</article-title>. <source>Comput. Electron. Agric.</source> <volume>202</volume>, <fpage>107367</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2022.107367</pub-id>
</citation>
</ref>
<ref id="B79">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tatsumi</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Igarashi</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Mengxue</surname>
<given-names>X.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Prediction of plant-level tomato biomass and yield using machine learning with unmanned aerial vehicle imagery</article-title>. <source>Plant Methods</source> <volume>17</volume>, <fpage>77</fpage>. <pub-id pub-id-type="doi">10.1186/s13007-021-00761-2</pub-id>
</citation>
</ref>
<ref id="B80">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tesfaye</surname>
<given-names>A. A.</given-names>
</name>
<name>
<surname>Osgood</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Aweke</surname>
<given-names>B. G.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Combining machine learning, space-time cloud restoration and phenology for farm-level wheat yield prediction</article-title>. <source>Artif. Intell. Agric.</source> <volume>5</volume>, <fpage>208</fpage>&#x2013;<lpage>222</lpage>. <pub-id pub-id-type="doi">10.1016/j.aiia.2021.10.002</pub-id>
</citation>
</ref>
<ref id="B81">
<citation citation-type="web">
<collab>The AnyLogic Company</collab>. <article-title>AnyLogic</article-title>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://www.anylogic.com/features/">https://www.anylogic.com/features/</ext-link> (Accessed August 17, 2023)</comment>.</citation>
</ref>
<ref id="B82">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Thilakarathne</surname>
<given-names>N. N.</given-names>
</name>
<name>
<surname>Bakar</surname>
<given-names>M. S. A.</given-names>
</name>
<name>
<surname>Abas</surname>
<given-names>P. E.</given-names>
</name>
<name>
<surname>Yassin</surname>
<given-names>H.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Towards making the fields talks: a real-time cloud enabled IoT crop management platform for smart agriculture</article-title>. <source>Front. Plant Sci.</source> <volume>13</volume>, <fpage>1030168</fpage>. <pub-id pub-id-type="doi">10.3389/fpls.2022.1030168</pub-id>
</citation>
</ref>
<ref id="B83">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Torgbor</surname>
<given-names>B. A.</given-names>
</name>
<name>
<surname>Rahman</surname>
<given-names>M. M.</given-names>
</name>
<name>
<surname>Brinkhoff</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Sinha</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Robson</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Integrating remote sensing and weather variables for mango yield prediction using a machine learning approach</article-title>. <source>Remote Sens.</source> <volume>15</volume>, <fpage>3075</fpage>. <pub-id pub-id-type="doi">10.3390/rs15123075</pub-id>
</citation>
</ref>
<ref id="B84">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Urbanic</surname>
<given-names>R. J.</given-names>
</name>
<name>
<surname>Elmaraghy</surname>
<given-names>W. H.</given-names>
</name>
</person-group> (<year>2008</year>). <article-title>Design recovery of internal and external features for mechanical components</article-title>. <source>Virtual Phys. Prototyp.</source> <volume>3</volume>, <fpage>61</fpage>&#x2013;<lpage>83</lpage>. <pub-id pub-id-type="doi">10.1080/17452750802078698</pub-id>
</citation>
</ref>
<ref id="B85">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>V&#xe1;rady</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Martin</surname>
<given-names>R. R.</given-names>
</name>
<name>
<surname>Cox</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>1997</year>). <article-title>Reverse engineering of geometric models&#x2014;an introduction</article-title>. <source>Computer-Aided Des.</source> <volume>29</volume>, <fpage>255</fpage>&#x2013;<lpage>268</lpage>. <pub-id pub-id-type="doi">10.1016/S0010-4485(96)00054-1</pub-id>
</citation>
</ref>
<ref id="B86">
<citation citation-type="book">
<collab>VCMI</collab> (<year>2019</year>). <source>The avoidable crisis of food waste: technical report</source>.</citation>
</ref>
<ref id="B87">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Vijayakumar</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Costa</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Ampatzidis</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2021</year>). &#x201c;<article-title>Prediction of citrus yield with AI using ground-based fruit detection and UAV imagery</article-title>,&#x201d; in <source>2021 ASABE annual international virtual meeting</source> (<publisher-loc>St. Joseph, MI</publisher-loc>: <publisher-name>American Society of Agricultural and Biological Engineers</publisher-name>). <pub-id pub-id-type="doi">10.13031/aim.202100493</pub-id>
</citation>
</ref>
<ref id="B88">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Luo</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Zhu</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>W.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Swingd: a robust grape bunch detection model based on swin transformer in complex vineyard environment</article-title>. <source>Horticulturae</source> <volume>7</volume>, <fpage>492</fpage>. <pub-id pub-id-type="doi">10.3390/horticulturae7110492</pub-id>
</citation>
</ref>
<ref id="B89">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Kang</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Zhou</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Au</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>M. Y.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Development and evaluation of a robust soft robotic gripper for apple harvesting</article-title>. <source>Comput. Electron. Agric.</source> <volume>204</volume>, <fpage>107552</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2022.107552</pub-id>
</citation>
</ref>
<ref id="B90">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Weyler</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Milioto</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Falck</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Behley</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Stachniss</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Joint plant instance detection and leaf count estimation for in-field plant phenotyping</article-title>. <source>IEEE Robotics Automation Lett.</source> <volume>6</volume>, <fpage>3599</fpage>&#x2013;<lpage>3606</lpage>. <pub-id pub-id-type="doi">10.1109/LRA.2021.3060712</pub-id>
</citation>
</ref>
<ref id="B91">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Williams</surname>
<given-names>H. A. M.</given-names>
</name>
<name>
<surname>Jones</surname>
<given-names>M. H.</given-names>
</name>
<name>
<surname>Nejati</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Seabright</surname>
<given-names>M. J.</given-names>
</name>
<name>
<surname>Bell</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Penhall</surname>
<given-names>N. D.</given-names>
</name>
<etal/>
</person-group> (<year>2019</year>). <article-title>Robotic kiwifruit harvesting using machine vision, convolutional neural networks, and robotic arms</article-title>. <source>Biosyst. Eng.</source> <volume>181</volume>, <fpage>140</fpage>&#x2013;<lpage>156</lpage>. <pub-id pub-id-type="doi">10.1016/j.biosystemseng.2019.03.007</pub-id>
</citation>
</ref>
<ref id="B92">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wise</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Wedding</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Selby-Pham</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Application of automated image colour analyses for the early-prediction of strawberry development and quality</article-title>. <source>Sci. Hortic.</source> <volume>304</volume>, <fpage>111316</fpage>. <pub-id pub-id-type="doi">10.1016/j.scienta.2022.111316</pub-id>
</citation>
</ref>
<ref id="B93">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xiong</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Peng</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Grimstad</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>From</surname>
<given-names>P. J.</given-names>
</name>
<name>
<surname>Isler</surname>
<given-names>V.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Development and field evaluation of a strawberry harvesting robot with a cable-driven gripper</article-title>. <source>Comput. Electron. Agric.</source> <volume>157</volume>, <fpage>392</fpage>&#x2013;<lpage>402</lpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2019.01.009</pub-id>
</citation>
</ref>
<ref id="B94">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xu</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Yu</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Niu</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Zhao</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Design and experiment of an underactuated broccoli-picking manipulator</article-title>. <source>Agriculture</source> <volume>13</volume>, <fpage>848</fpage>. <pub-id pub-id-type="doi">10.3390/agriculture13040848</pub-id>
</citation>
</ref>
<ref id="B95">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yin</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Sun</surname>
<given-names>Q.</given-names>
</name>
<name>
<surname>Ren</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Guo</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Wei</surname>
<given-names>Y.</given-names>
</name>
<etal/>
</person-group> (<year>2023</year>). <article-title>Development, integration, and field evaluation of an autonomous citrus-harvesting robot</article-title>. <source>J. Field Robotics</source> <volume>40</volume>, <fpage>1363</fpage>&#x2013;<lpage>1387</lpage>. <pub-id pub-id-type="doi">10.1002/rob.22178</pub-id>
</citation>
</ref>
<ref id="B96">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yu</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Fan</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Wan</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Zeng</surname>
<given-names>X.</given-names>
</name>
<etal/>
</person-group> (<year>2021</year>). <article-title>A lab-customized autonomous humanoid apple harvesting robot</article-title>. <source>Comput. Electr. Eng.</source> <volume>96</volume>, <fpage>107459</fpage>. <pub-id pub-id-type="doi">10.1016/j.compeleceng.2021.107459</pub-id>
</citation>
</ref>
<ref id="B97">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yu</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Fruit detection for strawberry harvesting robot in non-structural environment based on Mask-RCNN</article-title>. <source>Comput. Electron. Agric.</source> <volume>163</volume>, <fpage>104846</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2019.06.001</pub-id>
</citation>
</ref>
<ref id="B98">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zamora-Izquierdo</surname>
<given-names>M. A.</given-names>
</name>
<name>
<surname>Santa</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Mart&#xed;nez</surname>
<given-names>J. A.</given-names>
</name>
<name>
<surname>Mart&#xed;nez</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Skarmeta</surname>
<given-names>A. F.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Smart farming IoT platform based on edge and cloud computing</article-title>. <source>Biosyst. Eng.</source> <volume>177</volume>, <fpage>4</fpage>&#x2013;<lpage>17</lpage>. <pub-id pub-id-type="doi">10.1016/j.biosystemseng.2018.10.014</pub-id>
</citation>
</ref>
<ref id="B99">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhu</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Wu</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Hu</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Gong</surname>
<given-names>H.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Spatial location of sugarcane node for binocular vision-based harvesting robots based on improved YOLOv4</article-title>. <source>Appl. Sci. Switz.</source> <volume>12</volume>, <fpage>3088</fpage>. <pub-id pub-id-type="doi">10.3390/app12063088</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>