<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2024.1358360</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Plant Science</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Advanced deep learning models for phenotypic trait extraction and cultivar classification in lychee using photon-counting micro-CT imaging</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" equal-contrib="yes">
<name>
<surname>Xue</surname>
<given-names>Mengjia</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="author-notes" rid="fn003">
<sup>&#x2020;</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2609199"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
</contrib>
<contrib contrib-type="author" equal-contrib="yes">
<name>
<surname>Huang</surname>
<given-names>Siyi</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="author-notes" rid="fn003">
<sup>&#x2020;</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2581469"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Xu</surname>
<given-names>Wenting</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/2614066"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Xie</surname>
<given-names>Tianwu</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1131258"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>Institute of Radiation Medicine, Fudan University</institution>, <addr-line>Shanghai</addr-line>, <country>China</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Britton Chance Center for Biomedical Photonics, Wuhan National Laboratory for Optoelectronics, Huazhong University of Science and Technology</institution>, <addr-line>Wuhan, Hubei</addr-line>, <country>China</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited by: Andr&#xe9;s J. Cort&#xe9;s, Colombian Corporation for Agricultural Research (AGROSAVIA), Colombia</p>
</fn>
<fn fn-type="edited-by">
<p>Reviewed by: Satoru Tsuchikawa, Nagoya University, Japan</p>
<p>Juan David Beltran, ECOM Agroindustrial Corp. Limited., United Kingdom</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Tianwu Xie, <email xlink:href="mailto:tianwuxie@fudan.edu.cn">tianwuxie@fudan.edu.cn</email>
</p>
</fn>
<fn fn-type="equal" id="fn003">
<p>&#x2020;These authors have contributed equally to this work</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>29</day>
<month>02</month>
<year>2024</year>
</pub-date>
<pub-date pub-type="collection">
<year>2024</year>
</pub-date>
<volume>15</volume>
<elocation-id>1358360</elocation-id>
<history>
<date date-type="received">
<day>19</day>
<month>12</month>
<year>2023</year>
</date>
<date date-type="accepted">
<day>12</day>
<month>02</month>
<year>2024</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2024 Xue, Huang, Xu and Xie</copyright-statement>
<copyright-year>2024</copyright-year>
<copyright-holder>Xue, Huang, Xu and Xie</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<sec>
<title>Introduction</title>
<p>In contemporary agronomic research, the focus has increasingly shifted towards non-destructive imaging and precise phenotypic characterization. A photon-counting micro-CT system has been developed, which is capable of imaging lychee fruit at the micrometer level and capturing a full energy spectrum, thanks to its advanced photon-counting detectors.</p>
</sec>
<sec>
<title>Methods</title>
<p>For automatic measurement of phenotypic traits, seven CNN-based deep learning models including AttentionUNet, DeeplabV3+, SegNet, TransUNet, UNet, UNet++, and UNet3+ were developed. Machine learning techniques tailored for small-sample training were employed to identify key characteristics of various lychee species.</p>
</sec>
<sec>
<title>Results</title>
<p>These models demonstrate outstanding performance with Dice, Recall, and Precision indices predominantly ranging between 0.90 and 0.99. The Mean Intersection over Union (MIoU) consistently falls between 0.88 and 0.98. This approach served both as a feature selection process and a means of classification, significantly enhancing the study's ability to discern and categorize distinct lychee varieties.</p>
</sec>
<sec>
<title>Discussion</title>
<p>This research not only contributes to the advancement of non-destructive plant analysis but also opens new avenues for exploring the intricate phenotypic variations within plant species.</p>
</sec>
</abstract>
<kwd-group>
<kwd>plant phenomics</kwd>
<kwd>micro-CT</kwd>
<kwd>lychee phenotypic traits</kwd>
<kwd>deep learning</kwd>
<kwd>non-destructive</kwd>
</kwd-group>
<contract-num rid="cn001">32227801</contract-num>
<contract-sponsor id="cn001">National Natural Science Foundation of China<named-content content-type="fundref-id">10.13039/501100001809</named-content>
</contract-sponsor>
<counts>
<fig-count count="10"/>
<table-count count="2"/>
<equation-count count="0"/>
<ref-count count="25"/>
<page-count count="11"/>
<word-count count="3887"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-in-acceptance</meta-name>
<meta-value>Plant Breeding</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<label>1</label>
<title>Introduction</title>
<p>Lychee (<italic>Litchi chinensis</italic> Sonn.) is a subtropical fruit enjoyed for its unique taste and nutritional benefits, primarily grown in parts of Asia (<xref ref-type="bibr" rid="B14">Menzel, 1985</xref>). Recent research highlights lychee&#x2019;s potential health properties, including anticancer, antibacterial, and antioxidant activities (<xref ref-type="bibr" rid="B24">Zhang et&#xa0;al., 2021</xref>). As interest grows in how genes affect lychee traits, some studies are uncovering errors in cultivar naming (<xref ref-type="bibr" rid="B12">Liu and Mei, 2005</xref>) and using new genetic markers for plant diversity studies (<xref ref-type="bibr" rid="B20">Tran et&#xa0;al., 2019</xref>).</p>
<p>New technologies like micro-CT scans are transforming how we study plants, allowing us to see the details of plant structures without destroying them (<xref ref-type="bibr" rid="B9">Keklikoglou et&#xa0;al., 2021</xref>). This method is proving useful for examining plant parts like roots in their natural soil environment (<xref ref-type="bibr" rid="B10">Kurogane et&#xa0;al., 2021</xref>) and for studying flower shapes more efficiently (<xref ref-type="bibr" rid="B2">Begot et&#xa0;al., 2022</xref>). Yet, to fully understand plants, research interest needs to move beyond looking at images to measuring them accurately (<xref ref-type="bibr" rid="B8">Karahara et&#xa0;al., 2023</xref>).</p>
<p>Deep learning has been utilized to measure plant traits for better crop breeding (<xref ref-type="bibr" rid="B21">Wu et&#xa0;al., 2021</xref>), assisting in harvesting fruits like lychee (<xref ref-type="bibr" rid="B22">Xie et&#xa0;al., 2022</xref>), and even works on portable devices in the field (<xref ref-type="bibr" rid="B6">Jiao et&#xa0;al., 2022</xref>). Researchers have also made it easier to tell different lychee cultivars apart just by their shape (<xref ref-type="bibr" rid="B17">Osako et&#xa0;al., 2020</xref>). Not only can deep learning help identify plants in complex environments, but it can also reconstruct 3D models and extract phenotypic traits from CT images of a plant. Nondestructive methods for more accurate and efficient automatic acquisition of comprehensive phenotypic traits have been developed for the passion fruit (<xref ref-type="bibr" rid="B13">Lu et&#xa0;al., 2023</xref>) and the coconut (<xref ref-type="bibr" rid="B23">Yu et&#xa0;al., 2022</xref>).</p>
<p>In this study, a state-of-the-art photon-counting micro-CT technology was used to investigate the internal structure of lychees, which, to our knowledge, is the first such attempt. Seven CNN-based deep learning models were then utilized to segment the CT images and extract phenotypic traits automatically. We compared the algorithm-driven approach with manual analysis to assess whether the approach worked well. To examine which phenotypic traits are the key characteristics of a lychee, machine learning based on small-sample training was used for feature selection and classification.</p>
</sec>
<sec id="s2">
<label>2</label>
<title>Methods</title>
<sec id="s2_1">
<label>2.1</label>
<title>Materials</title>
<p>In this study, 80 lychees (10 individuals per species) were randomly selected from eight distinct species for photon-counting micro-CT imaging and manual measurements. The species sampled were carefully chosen to represent a wide range of lychee varieties cultivated in China. These included: 1. &#x2018;Xian Jin Feng&#x2019; from Guangzhou, Guangdong; 2. &#x2018;Gui Wei&#x2019; from Zhanjiang, Guangdong; 3. &#x2018;Jing Gang Hong Nuo&#x2019; from Guangzhou, Guangdong; 4. &#x2018;Nuo Mi Ci&#x2019; from Guangdong; 5. &#x2018;Fei Zi Xiao&#x2019; from Guangdong; 6. &#x2018;Yu He Bao&#x2019; from Yangjiang, Guangdong, noted for its larger seed size; 7. &#x2018;Ji Zui&#x2019; from Zhanjiang, Guangdong; 8. &#x2018;Hei Ye&#x2019; from Fujian. The samples were meticulously collected to ensure they accurately represented each variety&#x2019;s unique traits. <xref ref-type="fig" rid="f1">
<bold>Figure&#xa0;1</bold>
</xref> presents eight different species of lychee, each row showing a unique type through real, modeled, and cross-sectional images.</p>
<fig id="f1" position="float">
<label>Figure&#xa0;1</label>
<caption>
<p>Four morphologically representative lychee samples. <bold>(A)</bold> RGB images of lychees. <bold>(B)</bold> 3D NURBS model of lychee constructed from Micro-CT. <bold>(C)</bold> Cross-sectional view of lychee. <bold>(D)</bold> Tomogram images of lychee cross-section from Micro-CT.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1358360-g001.tif"/>
</fig>
</sec>
<sec id="s2_2">
<label>2.2</label>
<title>Photon-counting micro-CT imaging</title>
<p>To our knowledge, this represents the first instance of such advanced technology being utilized in the agricultural field. The research employed a state-of-the-art photon-counting micro-computed tomography system engineered to acquire high-resolution images with a voxel resolution finer than 100 micrometers, as depicted in <xref ref-type="fig" rid="f2">
<bold>Figure&#xa0;2</bold>
</xref>. This imaging system is able to reveal intricate details that are not discernible through traditional CT imaging. The lychee samples are placed on the sample platform situated between the Microfocus X-ray source (MFX) and the photon-counting detectors. During the scanning process, both the MFX and the detectors rotate around the sample. This movement is crucial as it enables the system to capture a multitude of projection images from various angles, effectively &#x2018;slicing&#x2019; through the sample in a non-destructive manner. This advanced imaging technique provided detailed cross-sectional images of the lychee fruit, enabling precise measurements and analysis of internal structures that are otherwise difficult to assess with traditional methods. It not only allowed us to visualize the internal architecture of the lychee fruits but also to construct three-dimensional models for a more comprehensive evaluation of their physical attributes.</p>
<fig id="f2" position="float">
<label>Figure&#xa0;2</label>
<caption>
<p>Photon-counting micro-CT scanner design. By rotational scanning the lychee samples, the MFX and the photon-counting detectors transmit data to the Console PC, which then generates CT images through pre-processing and reconstruction steps.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1358360-g002.tif"/>
</fig>
</sec>
<sec id="s2_3">
<label>2.3</label>
<title>Segmentation models</title>
<p>The research utilized seven CNN-based deep learning models, including AttentionUNet (<xref ref-type="bibr" rid="B15">Mishra et&#xa0;al., 2018</xref>), DeeplabV3+ (<xref ref-type="bibr" rid="B4">da Cruz et&#xa0;al., 2022</xref>), SegNet (<xref ref-type="bibr" rid="B1">Alqazzaz et&#xa0;al., 2019</xref>), TransUNet (<xref ref-type="bibr" rid="B3">Chen et&#xa0;al., 2021</xref>), UNet (<xref ref-type="bibr" rid="B19">Ronneberger et&#xa0;al., 2015</xref>), UNet++ (<xref ref-type="bibr" rid="B25">Zhou et&#xa0;al., 2019</xref>), and UNet3+ (<xref ref-type="bibr" rid="B5">Huang et&#xa0;al., 2020</xref>), to segment CT images and effectively identified the kernel, pulp, endocarp and epicarp parts of a lychee. Accurate segmentation by these models is fundamental for analyzing the lychee&#x2019;s internal features, which is essential for understanding its physical traits.</p>
<p>The UNet model is a primary tool in our study due to its efficient design that captures important features in the images and then reconstructs a segmented image that includes all necessary details. SegNet enhances this process by using a special technique that improves the model&#x2019;s ability to capture and recreate finer details. DeeplabV3+ goes a step further by processing different image sizes effectively, which is important when dealing with various parts of the lychee. AttentionUNet, UNet++, and UNet3+ build on this by focusing on the most important features in the image and combining information at different scales to create a more accurate picture. TransUNet stands out by blending the strengths of two architectures to provide a balance between a broad view and detailed aspects of the images.</p>
<p>The study processed a total of 6480 images, 810 images for each of the eight lychee species. The dataset was then randomly divided, with 80% allocated for training the models and the remaining 20% reserved for validation. To quantify the performance of each CNN-based model against ground-truth segmentations, the following four indicators were used: mean intersection over union (MIoU), Dice similarity coefficient (DSC), precision, and recall.</p>
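<p>For transparency about how these indicators can be computed, the sketch below shows one way to derive the four metrics per class from binary masks using NumPy; it is an illustrative implementation with toy inputs and hypothetical variable names, not the authors&#x2019; evaluation code.</p>
<preformat preformat-type="code">
import numpy as np

def segmentation_metrics(pred, truth):
    """Per-class IoU, Dice, recall and precision from two binary masks."""
    pred = np.asarray(pred, dtype=bool)
    truth = np.asarray(truth, dtype=bool)
    tp = np.logical_and(pred, truth).sum()   # true positives
    fp = np.logical_and(pred, ~truth).sum()  # false positives
    fn = np.logical_and(~pred, truth).sum()  # false negatives
    iou = tp / (tp + fp + fn + 1e-8)
    dice = 2 * tp / (2 * tp + fp + fn + 1e-8)
    recall = tp / (tp + fn + 1e-8)
    precision = tp / (tp + fp + 1e-8)
    return iou, dice, recall, precision

# Example: a 4x4 toy mask pair for one class (e.g. the kernel).
pred = np.array([[1, 1, 0, 0]] * 4, dtype=bool)
truth = np.array([[1, 0, 0, 0]] * 4, dtype=bool)
print(segmentation_metrics(pred, truth))
</preformat>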
</sec>
<sec id="s2_4">
<label>2.4</label>
<title>Trait extraction approach</title>
<p>Following the delineation of the lychee&#x2019;s kernel, pulp, endocarp, and epicarp, morphological characteristics of the fruit were extracted. The key morphological traits examined in this study, together with their compiled quantitative values (the &#x2018;Values&#x2019; column), are summarized in <xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref>. The measurements were initially obtained manually to establish a baseline for subsequent comparison.</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Extraction of morphological traits from lychee CT images.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" rowspan="2" align="center">Traits Class</th>
<th valign="bottom" colspan="2" align="center">Morphological Traits</th>
<th valign="bottom" colspan="2" align="center">Values</th>
<th valign="middle" rowspan="2" align="center">Unit</th>
</tr>
<tr>
<th valign="bottom" align="center">Description</th>
<th valign="bottom" align="center">Abbreviation</th>
<th valign="bottom" align="center">Mean &#xb1; SD</th>
<th valign="bottom" align="center">Range</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="bottom" rowspan="4" align="left">Lychee Fruit Traits</td>
<td valign="bottom" align="left">Fruit Weight</td>
<td valign="bottom" align="left">
<italic>W_Fruit</italic>
</td>
<td valign="bottom" align="left">24.40 &#xb1; 3.76</td>
<td valign="bottom" align="left">18.43 ~ 35.13</td>
<td valign="bottom" align="left">g</td>
</tr>
<tr>
<td valign="bottom" align="left">Fruit Length</td>
<td valign="bottom" align="left">
<italic>L_Fruit</italic>
</td>
<td valign="bottom" align="left">37.51 &#xb1; 2.81</td>
<td valign="bottom" align="left">31.54 ~ 46.64</td>
<td valign="bottom" align="left">mm</td>
</tr>
<tr>
<td valign="bottom" align="left">Fruit Width</td>
<td valign="bottom" align="left">
<italic>D_Fruit</italic>
</td>
<td valign="bottom" align="left">35.12 &#xb1; 2.24</td>
<td valign="bottom" align="left">30.25 ~ 40.64</td>
<td valign="bottom" align="left">mm</td>
</tr>
<tr>
<td valign="bottom" align="left">Fruit Height</td>
<td valign="bottom" align="left">
<italic>H_Fruit</italic>
</td>
<td valign="bottom" align="left">36.03 &#xb1; 2.35</td>
<td valign="bottom" align="left">32.29 ~ 46.51</td>
<td valign="bottom" align="left">mm</td>
</tr>
<tr>
<td valign="bottom" rowspan="4" align="left">Kernel Traits</td>
<td valign="bottom" align="left">Kernel Weight</td>
<td valign="bottom" align="left">
<italic>W_Kernel</italic>
</td>
<td valign="bottom" align="left">1.17 &#xb1; 0.93</td>
<td valign="bottom" align="left">0.24 ~ 3.65</td>
<td valign="bottom" align="left">g</td>
</tr>
<tr>
<td valign="bottom" align="left">Kernel Length</td>
<td valign="bottom" align="left">
<italic>L_Kernel</italic>
</td>
<td valign="bottom" align="left">10.89 &#xb1; 2.03</td>
<td valign="bottom" align="left">6.50 ~ 16.22</td>
<td valign="bottom" align="left">mm</td>
</tr>
<tr>
<td valign="bottom" align="left">Kernel Width</td>
<td valign="bottom" align="left">
<italic>D_Kernel</italic>
</td>
<td valign="bottom" align="left">8.94 &#xb1; 2.56</td>
<td valign="bottom" align="left">5.21 ~ 13.35</td>
<td valign="bottom" align="left">mm</td>
</tr>
<tr>
<td valign="bottom" align="left">Kernel Height</td>
<td valign="bottom" align="left">
<italic>H_Kernel</italic>
</td>
<td valign="bottom" align="left">17.49 &#xb1; 3.02</td>
<td valign="bottom" align="left">12.00 ~ 24.43</td>
<td valign="bottom" align="left">mm</td>
</tr>
<tr>
<td valign="bottom" rowspan="4" align="left">Pulps Traits</td>
<td valign="bottom" align="left">Pulps Weight</td>
<td valign="bottom" align="left">
<italic>W_Pulps</italic>
</td>
<td valign="bottom" align="left">4.84 &#xb1; 0.95</td>
<td valign="bottom" align="left">2.72 ~ 7.96</td>
<td valign="bottom" align="left">g</td>
</tr>
<tr>
<td valign="bottom" align="left">Pulps Length</td>
<td valign="bottom" align="left">
<italic>L_Pulps</italic>
</td>
<td valign="bottom" align="left">31.24 &#xb1; 2.18</td>
<td valign="bottom" align="left">27.72 ~ 38.15</td>
<td valign="bottom" align="left">mm</td>
</tr>
<tr>
<td valign="bottom" align="left">Pulps Width</td>
<td valign="bottom" align="left">
<italic>D_Pulps</italic>
</td>
<td valign="bottom" align="left">32.14 &#xb1; 2.37</td>
<td valign="bottom" align="left">27.43 ~ 39.43</td>
<td valign="bottom" align="left">mm</td>
</tr>
<tr>
<td valign="bottom" align="left">Pulps Height</td>
<td valign="bottom" align="left">
<italic>H_Pulps</italic>
</td>
<td valign="bottom" align="left">32.46 &#xb1; 2.45</td>
<td valign="bottom" align="left">27.43 ~ 39.34</td>
<td valign="bottom" align="left">mm</td>
</tr>
<tr>
<td valign="bottom" rowspan="4" align="left">Others</td>
<td valign="bottom" align="left">RGB Color - R</td>
<td valign="bottom" align="left">R</td>
<td valign="bottom" align="left">107 &#xb1; 13</td>
<td valign="bottom" align="left">84 ~ 142</td>
<td valign="bottom" align="left">&#x2013;</td>
</tr>
<tr>
<td valign="bottom" align="left">RGB Color - G</td>
<td valign="bottom" align="left">G</td>
<td valign="bottom" align="left">55 &#xb1; 13</td>
<td valign="bottom" align="left">33 ~ 100</td>
<td valign="bottom" align="left">&#x2013;</td>
</tr>
<tr>
<td valign="bottom" align="left">RGB Color - B</td>
<td valign="bottom" align="left">B</td>
<td valign="bottom" align="left">26 &#xb1; 4</td>
<td valign="bottom" align="left">17 ~ 35</td>
<td valign="bottom" align="left">&#x2013;</td>
</tr>
<tr>
<td valign="bottom" align="left">Surface Area</td>
<td valign="bottom" align="left">Area</td>
<td valign="bottom" align="left">534.11 &#xb1; 64.12</td>
<td valign="bottom" align="left">430.76 ~ 703.86</td>
<td valign="bottom" align="left">0.7 cm&#xb2;</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Concurrently, a series of algorithms was developed for the automated quantification of these traits, facilitating the validation and accuracy assessment against manual measurements. Pearson Correlation analysis, applied following data acquisition, illuminated the interrelationships among various traits. The use of Linear Discriminant Analysis (LDA) proved instrumental in determining the significance of each trait for distinguishing between lychee varieties.</p>
<p>In the final stage of our research, the study applied classification techniques including Support Vector Machine (SVM), Random Forest Classifier (RFC), and LDA for species categorization of lychee. To optimize the classification performance with a limited sample size, preprocessing strategies like Normalization, Stratified Cross-Validation (CV), and Leave-One-Out Cross-Validation (LOO CV) were integrated. These methods contributed significantly to the robustness and accuracy of the species classification model.</p>
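<p>To make the correlation step concrete, the sketch below computes a Pearson correlation matrix and its lower triangle from a trait table using pandas; the DataFrame contents and column names are hypothetical placeholders for the traits of <bold>Table&#xa0;1</bold>, not the study&#x2019;s data.</p>
<preformat preformat-type="code">
import numpy as np
import pandas as pd

# Hypothetical stand-in for the per-fruit trait table; in practice the columns
# hold the automatically extracted morphological traits.
rng = np.random.default_rng(0)
traits_df = pd.DataFrame(rng.normal(size=(80, 4)),
                         columns=["W_Fruit", "L_Fruit", "W_Kernel", "Area"])

corr = traits_df.corr(method="pearson")              # Pearson correlation matrix
mask = np.triu(np.ones_like(corr, dtype=bool), k=1)  # hide the upper triangle
print(corr.mask(mask).round(2))                      # lower-triangle view
</preformat>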
</sec>
</sec>
<sec id="s3" sec-type="results">
<label>3</label>
<title>Results</title>
<sec id="s3_1">
<label>3.1</label>
<title>Projection image obtained from photon-counting micro-CT system</title>
<p>Photon-counting detectors count the number of photons at each energy level with high energy resolution. They generate spectra spanning the full energy range, allowing detailed information acquisition at both low and high energy levels. These detectors are utilized in our photon-counting micro-CT setup to obtain clear and detailed images from CT scans. <xref ref-type="fig" rid="f3">
<bold>Figure&#xa0;3</bold>
</xref> illustrates the results achievable from this characteristic of the system. This graph is crucial for understanding the variations in density and composition of the samples at different energy levels.</p>
<fig id="f3" position="float">
<label>Figure&#xa0;3</label>
<caption>
<p>Multi-energy spectral analysis based on photon-counting detectors. <bold>(A)</bold> The left column features three sub-figures, each reconstructed using different energy spans. <bold>(B)</bold> The violin plot with a box plot on the right side were used to reveal the distribution of average Hounsfield Unit (HU) values in lychee pulp tissues.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1358360-g003.tif"/>
</fig>
</sec>
<sec id="s3_2">
<label>3.2</label>
<title>Model performance in image segmentation</title>
<p>From the segmentation results of each model on the test set, the same image was selected from each model for comparative analysis. The segmentation accuracy of the models is visually represented in <xref ref-type="fig" rid="f4">
<bold>Figure&#xa0;4</bold>
</xref>. The manual segmentation shown there is the standard against which we judge the automated models&#x2019; performance. The visual differences between the manual and automated segmentations provide insight into which model most closely replicates the expert&#x2019;s accuracy, guiding the choice of the best model for our analysis.</p>
<fig id="f4" position="float">
<label>Figure&#xa0;4</label>
<caption>
<p>Segmentation results. The images presented in the figure are from the segmentation results of the same sample in the test set trained by different models. The first column acts as a reference, presenting the manual segmentation of the lychee fruit&#x2019;s components, namely the kernel, pulp, endocarp, and epicarp. The following columns exhibit the segmentation outcomes from different models, including AttentionUNet, DeepLabV3+, among others. These segments are highlighted with color-coded outlines, facilitating a swift comparison of each model&#x2019;s precision in relation to the manual segmentation.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1358360-g004.tif"/>
</fig>
<p>After the visual comparisons, <xref ref-type="fig" rid="f5">
<bold>Figure&#xa0;5</bold>
</xref> offers a detailed quantitative analysis of the performance of each deep learning model in segmenting the various components of the lychee fruit, using the Mean Intersection over Union (MIoU), Dice coefficient, Recall, and Precision metrics. The models exhibit remarkable segmentation capabilities, particularly for the Kernel, Pulp, and Background: the Dice, Recall, and Precision indices range mostly between 0.90 and 0.99, while the MIoU falls mostly between 0.88 and 0.98. UNet++ and TransUNet emerge as consistently strong performers and therefore represent the most effective choices for comprehensive segmentation of lychee images.</p>
<fig id="f5" position="float">
<label>Figure&#xa0;5</label>
<caption>
<p>Performance comparison of deep learning segmentation models. The initial five subfigures display the performance of seven CNN-based models in terms of MIoU, Dice, Recall, and Precision indices, specifically when segmenting different parts of the lychees, including the Kernel, Pulp, Endocarp, Epicarp, and Background. The final sub-figure provides an aggregated view of the average performance for each model.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1358360-g005.tif"/>
</fig>
<p>However, the segmentation performance of the endocarp and epicarp is relatively poor, with the MIoU, Dice, Recall, and Precision indices mostly below 0.80. This is likely due to the complex and relatively small structure of these two components, compared to the whole lychee fruit and pulp, and deep learning models may omit some of the information in order to improve generalization. The pericarp of mature lychee is approximately 400-500 <italic>&#xb5;m</italic> thick and consists of three layers: the epicarp, the mesocarp and the endocarp (<xref ref-type="bibr" rid="B18">Riederer et&#xa0;al., 2015</xref>). Further studies could consider using multi-scale or pyramid-based architectures to specifically improve segmentation of the endocarp and the epicarp.</p>
</sec>
<sec id="s3_3">
<label>3.3</label>
<title>Efficacy of trait extraction</title>
<p>The study further explored the effectiveness of trait extraction by comparing the automated measurements to the manual ones. <xref ref-type="fig" rid="f6">
<bold>Figure&#xa0;6</bold>
</xref> presents this comparative analysis for the dimensions &#x2014; length, width, and height &#x2014; of both the kernel and the fruit. Specifically in <xref ref-type="table" rid="T2">
<bold>Table&#xa0;2</bold>
</xref>, the MAE and RMSE values for parameter measurements are detailed, showcasing the error metrics across the different dimensional traits. In practical terms, for a lychee fruit with dimensions around 30 mm, the automated measurement method incurs an error margin of approximately 1.5 mm. Similarly, for a lychee kernel measuring about 10 mm, the error margin of the automated measurements is around 1.3 mm. These findings affirm the reliability and precision of the automated measurement approach. The method is especially accurate in evaluating kernel dimensions, showcasing its effectiveness in detailed trait assessment.</p>
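<p>The error metrics reported in <bold>Table&#xa0;2</bold> can be reproduced with a short routine such as the one below; it is a sketch assuming paired manual and automatic measurements stored as equal-length arrays, with illustrative numbers rather than the study&#x2019;s data.</p>
<preformat preformat-type="code">
import numpy as np

def measurement_errors(manual, automatic):
    """Return MAE and RMSE between manual (gold standard) and automatic values."""
    manual = np.asarray(manual, dtype=float)
    automatic = np.asarray(automatic, dtype=float)
    diff = automatic - manual
    mae = np.mean(np.abs(diff))         # mean absolute error
    rmse = np.sqrt(np.mean(diff ** 2))  # root mean squared error
    return mae, rmse

# Illustrative call for one trait, e.g. fruit length (L_Fruit) in millimetres.
mae, rmse = measurement_errors([37.5, 35.1, 36.0], [36.2, 36.8, 34.9])
print(round(mae, 2), round(rmse, 2))
</preformat>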
<fig id="f6" position="float">
<label>Figure&#xa0;6</label>
<caption>
<p>Comparison between manual and auto measurements for <bold>(A)</bold> fruit features and <bold>(B)</bold> kernel features. The x-axis represents the manual measurements of these morphological traits, which are considered the gold standard in our analysis. The y-axis displays the corresponding automatic measurements derived from our image segmentation algorithms. Each point on the scatter plots represents an individual measurement, with different colors symbolizing different traits: skin blue for length, dark blue for width, and green for height.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1358360-g006.tif"/>
</fig>
<table-wrap id="T2" position="float">
<label>Table&#xa0;2</label>
<caption>
<p>MAE and RMSE values for parameter measurements.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="top" align="center"/>
<th valign="top" align="center">L_Fruit</th>
<th valign="top" align="center">D_Fruit</th>
<th valign="top" colspan="2" align="center">H_Fruit</th>
<th valign="top" colspan="2" align="center">L_Kernel</th>
<th valign="top" align="center">D_Kernel</th>
<th valign="top" align="center">H_Kernel</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Mean Absolute Error (MAE)</td>
<td valign="top" align="center">1.44</td>
<td valign="top" align="center">1.58</td>
<td valign="top" colspan="2" align="center">1.26</td>
<td valign="top" colspan="2" align="center">1.39</td>
<td valign="top" align="center">1.24</td>
<td valign="top" align="center">1.66</td>
</tr>
<tr>
<td valign="top" align="left">Root Mean Squared Error (RMSE)</td>
<td valign="top" align="center">1.89</td>
<td valign="top" align="center">1.98</td>
<td valign="top" colspan="2" align="center">1.61</td>
<td valign="top" colspan="2" align="center">1.94</td>
<td valign="top" align="center">1.61</td>
<td valign="top" align="center">2.16</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3_4">
<label>3.4</label>
<title>Significance of traits for cultivar differentiation</title>
<p>In the feature selection process, this study employed both the Random Forest method and the Kendall Coefficient method, as each offers a different view of feature importance. The Random Forest approach indicates how features contribute to the performance of a specific model, while the Kendall rank correlation coefficient quantifies the strength of the monotonic relationship between variables (<xref ref-type="bibr" rid="B11">Lillo-Bravo et&#xa0;al., 2023</xref>). <xref ref-type="fig" rid="f7">
<bold>Figure&#xa0;7</bold>
</xref> explores the significance of the lychee traits and uses two y-axes, one for each method. According to the left y-axis, the &#x2018;W Kernel&#x2019; trait, with a value of 0.122, emerges as the most significant trait identified by the Random Forest Classifier, followed by traits such as &#x2018;B&#x2019;, &#x2018;R&#x2019;, &#x2018;L Kernel&#x2019; and &#x2018;G&#x2019;, with respective values of 0.095, 0.089, 0.088, and 0.083 in descending order. The right y-axis, based on the Kendall Correlation Coefficient method, also identifies &#x2018;W Kernel&#x2019; as the most crucial trait. However, traits like &#x2018;G&#x2019;, &#x2018;H Kernel&#x2019;, &#x2018;H Pulps&#x2019;, &#x2018;H Fruit&#x2019;, &#x2018;Area&#x2019; and &#x2018;D Pulps&#x2019; exhibit an inverse pattern compared to the Random Forest method, suggesting that these traits may have a complex relationship with the classification variables. Given the standard deviation band in the figure, the values of the individual features fluctuate widely, so we believe that all features ultimately need to be taken into account. Moreover, <xref ref-type="fig" rid="f8">
<bold>Figure&#xa0;8</bold>
</xref> illustrates a Pearson Correlation Matrix Triangular Heatmap, which effectively showcases the interrelationships among different fruit attributes.</p>
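<p>The two importance rankings can be computed as sketched below, using a Random Forest Classifier&#x2019;s impurity-based importances and SciPy&#x2019;s Kendall tau; the trait table <italic>X</italic> and label vector <italic>y</italic> here are hypothetical stand-ins, not the study&#x2019;s data.</p>
<preformat preformat-type="code">
import numpy as np
import pandas as pd
from scipy.stats import kendalltau
from sklearn.ensemble import RandomForestClassifier

# Hypothetical trait table: 80 fruits x 4 traits, with 8 cultivar labels.
rng = np.random.default_rng(0)
X = pd.DataFrame(rng.normal(size=(80, 4)), columns=["W_Kernel", "R", "G", "B"])
y = np.repeat(np.arange(8), 10)

# Impurity-based importance from a Random Forest Classifier.
rfc = RandomForestClassifier(n_estimators=500, random_state=0).fit(X, y)
rf_importance = pd.Series(rfc.feature_importances_, index=X.columns)

# Kendall rank correlation of each trait with the (ordinally encoded) cultivar.
kendall = X.apply(lambda col: kendalltau(col, y)[0])

print(rf_importance.sort_values(ascending=False).round(3))
print(kendall.round(3))
</preformat>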
<fig id="f7" position="float">
<label>Figure&#xa0;7</label>
<caption>
<p>Feature analysis. The left y-axis of the figure shows a histogram ranking these traits based on their significance by a Random Forest Classifier. The right y-axis is for the two lines within a shaded area. The orange line charts the Kendall Correlation Coefficient to the species variables. The blue line represents the average value of each trait after applying MinMax scaling, surrounded by the blue standard deviation zone.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1358360-g007.tif"/>
</fig>
<fig id="f8" position="float">
<label>Figure&#xa0;8</label>
<caption>
<p>Pearson correlation matrix triangular heatmap. Each cell within the heatmap provides the correlation coefficient between two attributes, with the color intensity and direction (green to blue) indicating the strength and type of the relationship.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1358360-g008.tif"/>
</fig>
<p>The dataset in this study includes eight unique lychee species, each defined by distinctive traits that set them apart. Linear Discriminant Analysis (LDA) was applied to ascertain the significance of each trait in differentiating between the lychee varieties. <xref ref-type="fig" rid="f9">
<bold>Figure&#xa0;9</bold>
</xref> provides a visual representation of the importance of various trait features across these eight lychee species. The data reveal pronounced differences in color and weight traits, and more subtle variations in size traits. This underscores the critical role of trait features in distinguishing among different varieties of lychee.</p>
<fig id="f9" position="float">
<label>Figure&#xa0;9</label>
<caption>
<p>Importance of trait features in different lychee varieties. Each subplot represents a particular lychee variety, with the vertical axis indicating the level of importance for each trait, and the horizontal axis representing different feature types.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1358360-g009.tif"/>
</fig>
</sec>
<sec id="s3_5">
<label>3.5</label>
<title>Classification accuracy with machine learning</title>
<p>Having established the importance of the phenotypic traits for each lychee species, further work was conducted to classify lychees using machine learning methods. The models used the morphological traits extracted from lychee CT images and listed in <xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref> as input and the species as output. Given the challenges posed by small datasets, selecting an appropriate classifier and cross-validation (CV) method is crucial to ensure robustness and accurate performance evaluation of the model. Three different classifiers were employed: Support Vector Machine (SVM), Random Forest Classifier (RFC), and Linear Discriminant Analysis (LDA).</p>
<p>Additionally, the models were trained using two distinct CV methods, Stratified Cross-Validation (Stratified CV) and Leave-One-Out Cross-Validation (LOO-CV), to extract the maximum information from a limited dataset by repeatedly splitting it into multiple subsets for training and testing. In the Stratified CV method with <italic>n_splits</italic> = 5, the dataset is divided into 5 folds, ensuring that in each iteration the training and testing sets maintain proportional representation of all 8 species, with 4 folds used for training and 1 fold for testing. In the LOO-CV method, the dataset is iteratively split for each sample, with one sample left out for testing in each iteration while the remaining samples are used for training.</p>
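<p>A minimal sketch of this evaluation protocol, assuming the scikit-learn library and hypothetical trait and label arrays rather than the authors&#x2019; exact script, might look like the following.</p>
<preformat preformat-type="code">
import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import LeaveOneOut, StratifiedKFold, cross_val_score
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import MinMaxScaler
from sklearn.svm import SVC

# Hypothetical inputs: 80 fruits x 16 traits, 8 cultivars with 10 fruits each.
rng = np.random.default_rng(0)
X = rng.normal(size=(80, 16))
y = np.repeat(np.arange(8), 10)

classifiers = {"SVM": SVC(), "RFC": RandomForestClassifier(random_state=0),
               "LDA": LinearDiscriminantAnalysis()}
for name, clf in classifiers.items():
    pipe = make_pipeline(MinMaxScaler(), clf)  # normalisation fitted inside each fold
    strat = cross_val_score(pipe, X, y, cv=StratifiedKFold(n_splits=5))
    loo = cross_val_score(pipe, X, y, cv=LeaveOneOut())
    print(f"{name}: Stratified CV {strat.mean():.2f}, LOO-CV {loo.mean():.2f}")
</preformat>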
<p>The left subfigure in <xref ref-type="fig" rid="f10">
<bold>Figure&#xa0;10</bold>
</xref> illustrates the accuracy of the classifiers using the Stratified CV method across different splits. The LDA algorithm outperforms the other two, demonstrating higher average accuracy and lower standard deviation, indicating its relative effectiveness in this application. The comparison of models trained with and without CV methods is depicted on the right side. Models trained with CV methods surpass the baseline models in performance, with LOO-CV yielding the most favorable results. This superiority can be attributed to LOO-CV&#x2019;s robustness and reduced susceptibility to overfitting, which are essential attributes for handling small datasets. Specifically, the LDA algorithm achieves an accuracy of 0.78 with the LOO-CV method and 0.79 with the Stratified CV method, making it a reliable choice for such scenarios; these CV methods are highly recommended for classification tasks involving small sample sizes.</p>
<fig id="f10" position="float">
<label>Figure&#xa0;10</label>
<caption>
<p>
<bold>(A)</bold> Accuracy of classifiers with stratified CV method, which illustrates the accuracy of classifiers using the Stratified CV method across different splits. <bold>(B)</bold> Average accuracy of classifiers with and without CV methods, where the sky-blue bars indicate the accuracy of models trained without CV methods, serving as a baseline for comparison.</p>
</caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fpls-15-1358360-g010.tif"/>
</fig>
<p>Given the combination of a limited sample size and relatively high feature dimensionality, classification results could potentially be improved by increasing the sample size, implementing dimensionality reduction, or exploring the application of a deep learning model. The sparsity of data in higher-dimensional spaces necessitates a larger dataset for effective modeling and generalization; with small samples in particular, models may fail to adequately capture the overall data structure.</p>
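<p>As one possible illustration of the dimensionality-reduction route, principal component analysis could be inserted into the same kind of pipeline; this is a hedged sketch of a potential extension on hypothetical data, not an experiment performed in this study.</p>
<preformat preformat-type="code">
import numpy as np
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.model_selection import StratifiedKFold, cross_val_score
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import MinMaxScaler

# Hypothetical traits and labels, as before: 80 fruits, 16 traits, 8 cultivars.
rng = np.random.default_rng(0)
X = rng.normal(size=(80, 16))
y = np.repeat(np.arange(8), 10)

# Keep the principal components explaining 95% of the variance before LDA.
pipe = make_pipeline(MinMaxScaler(), PCA(n_components=0.95),
                     LinearDiscriminantAnalysis())
scores = cross_val_score(pipe, X, y, cv=StratifiedKFold(n_splits=5))
print(round(scores.mean(), 2))
</preformat>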
</sec>
</sec>
<sec id="s4" sec-type="discussion">
<label>4</label>
<title>Discussion</title>
<sec id="s4_1">
<label>4.1</label>
<title>Advantages and limitations of photon-counting micro-CT</title>
<p>The incorporation of a photon-counting micro-CT system in our study has significantly enhanced the resolution and detail in the imaging of lychee fruits. This advanced technology, known for its high resolution and multi-energy spectral analysis, has allowed us to observe intricate details within the lychee fruits, which are critical for accurate phenotypic trait extraction. These features enable it to uncover details that conventional CT systems might miss. The ability to analyze multiple energy spectra allows for a more nuanced and detailed understanding of the scanned subject, which is particularly beneficial in complex biological studies. While the photon-counting micro-CT system offers many advantages, there are certain challenges and limitations to consider. Its specialization in small-scale imaging may limit its applicability to larger subjects or broader clinical settings. Additionally, the complexity and cost of the technology may pose challenges for widespread adoption in varied research environments.</p>
</sec>
<sec id="s4_2">
<label>4.2</label>
<title>Broader implications for scientific research</title>
<p>The photon-counting micro-CT system, as demonstrated in our study, holds significant potential for advancing plant phenomics research. Its capability for material decomposition, exemplified by the quantification of sucrose (<xref ref-type="bibr" rid="B7">Kanno and Kuroyama, 2021</xref>), organic acids, vitamins, and important mineral compounds, introduces an innovative dimension in plant biology for the analysis of internal compositions and phenotypic attributes. It could also transform the way we understand root systems (<xref ref-type="bibr" rid="B16">Mooney et&#xa0;al., 2012</xref>), seed germination, and plant-microorganism interactions, providing insights into plant growth, disease resistance, and nutrient uptake. The system&#x2019;s detailed imaging capabilities allow for non-destructive analysis, making it an invaluable tool for both plant scientists and agriculturalists in exploring and understanding the intricate details of plant life at a micro-level.</p>
</sec>
</sec>
<sec id="s5" sec-type="conclusions">
<label>5</label>
<title>Conclusions</title>
<p>This study has, for the first time, utilized a photon-counting micro-CT system to analyze phenotypic traits in lychee cultivars, marking a significant advance in plant phenomics. The approach integrates high-resolution imaging with advanced deep learning models, enabling precise segmentation and analysis of lychee fruit structures. This methodology not only demonstrates high accuracy in distinguishing different cultivars but also sets a new standard in non-destructive plant trait analysis. By combining detailed imaging with robust data processing techniques, our study opens up new possibilities for research in plant biology and agricultural sciences, offering a novel and efficient tool for cultivar classification and in-depth trait examination.</p>
</sec>
<sec id="s6" sec-type="data-availability">
<title>Data availability statement</title>
<p>The original contributions presented in the study are included in the article/supplementary material. Further inquiries can be directed to the corresponding author.</p>
</sec>
<sec id="s7" sec-type="author-contributions">
<title>Author contributions</title>
<p>MX: Writing &#x2013; review &amp; editing, Data curation, Investigation. SH: Conceptualization, Visualization, Writing &#x2013; original draft. WX: Methodology, Resources, Writing &#x2013; review &amp; editing. TX: Funding acquisition, Project administration, Resources, Supervision, Writing &#x2013; review &amp; editing.</p>
</sec>
</body>
<back>
<sec id="s8" sec-type="funding-information">
<title>Funding</title>
<p>The author(s) declare financial support was received for the research, authorship, and/or publication of this article. This work was supported by National Natural Science Foundation of China (32227801).</p>
</sec>
<sec id="s9" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s10" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Alqazzaz</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Nokes</surname> <given-names>L.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Automated brain tumor segmentation on multi-modal mr image using segnet</article-title>. <source>Comput. Visual Media</source> <volume>5</volume>, <fpage>209</fpage>&#x2013;<lpage>219</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s41095-019-0139-y</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Begot</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Slavkovic</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Oger</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Pichot</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Morin</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Boualem</surname> <given-names>A.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Precision phenotyping of nectar-related traits using x-ray micro computed tomography</article-title>. <source>Cells</source> <volume>11</volume>, <fpage>3452</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/cells11213452</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chen</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Lu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yu</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Luo</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Adeli</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Transunet: Transformers make strong encoders for medical image segmentation</article-title>. <source>arXiv preprint</source>. [Preprint]. Available at: <uri xlink:href="https://arxiv.org/abs/2102.04306">https://arxiv.org/abs/2102.04306</uri>.</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>da Cruz</surname> <given-names>L. B.</given-names>
</name>
<name>
<surname>J&#xfa;nior</surname> <given-names>D. A. D.</given-names>
</name>
<name>
<surname>Diniz</surname> <given-names>J. O. B.</given-names>
</name>
<name>
<surname>Silva</surname> <given-names>A. C.</given-names>
</name>
<name>
<surname>de Almeida</surname> <given-names>J. D. S.</given-names>
</name>
<name>
<surname>de Paiva</surname> <given-names>A. C.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Kidney tumor segmentation from computed tomography images using deeplabv3 + 2.5 d model</article-title>. <source>Expert Syst. Appl.</source> <volume>192</volume>, <fpage>116270</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.eswa.2021.116270</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Huang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Lin</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Tong</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Iwamoto</surname> <given-names>Y.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Unet 3+: A full-scale connected unet for medical image segmentation</article-title>. <conf-name>IEEE Xplore. ICASSP 2020-2020 IEEE international conference on acoustics, speech and signal processing (ICASSP)</conf-name>. <conf-date>2020 May 4-8</conf-date>. <conf-loc>Barcelona, Spain</conf-loc>: <publisher-name>IEEE</publisher-name>, <fpage>1055</fpage>&#x2013;<lpage>1059</lpage>.</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jiao</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Jia</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Lei</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Cai</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Zhong</surname> <given-names>Z.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>An effective litchi detection method based on edge devices in a complex scene</article-title>. <source>Biosyst. Eng.</source> <volume>222</volume>, <fpage>15</fpage>&#x2013;<lpage>28</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.biosystemseng.2022.07.009</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kanno</surname> <given-names>I.</given-names>
</name>
<name>
<surname>Kuroyama</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Estimation of the sugar content of fruit by energy-resolved computed tomography using a material decomposition method</article-title>. <source>J. Nucl. Sci. Technol.</source> <volume>58</volume>, <fpage>533</fpage>&#x2013;<lpage>541</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1080/00223131.2020.1845836</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Karahara</surname> <given-names>I.</given-names>
</name>
<name>
<surname>Yamauchi</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Uesugi</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Mineyuki</surname> <given-names>Y.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Three-dimensional visualization of plant tissues and organs by x-ray micro-computed tomography</article-title>. <source>Microscopy</source> <volume>72</volume> (<issue>4</issue>), <fpage>310</fpage>&#x2013;<lpage>25</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/jmicro/dfad026</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Keklikoglou</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Arvanitidis</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Chatzigeorgiou</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Chatzinikolaou</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Karagiannidis</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Koletsa</surname> <given-names>T.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Micro-ct for biological and biomedical studies: A comparison of imaging techniques</article-title>. <source>J. Imaging</source> <volume>7</volume>, <fpage>172</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/jimaging7090172</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Kurogane</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Tamaoki</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Yano</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Tanigaki</surname> <given-names>F.</given-names>
</name>
<name>
<surname>Shimazu</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Kasahara</surname> <given-names>H.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Visualization of arabidopsis root system architecture in 3d by refraction-contrast x-ray micro-computed tomography</article-title>. <source>Microscopy</source> <volume>70</volume>, <fpage>536</fpage>&#x2013;<lpage>544</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1093/jmicro/dfab027</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lillo-Bravo</surname> <given-names>I.</given-names>
</name>
<name>
<surname>Vera-Medina</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Fernandez-Peruchena</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Perez-Aparicio</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Lopez-Alvarez</surname> <given-names>J. A.</given-names>
</name>
<name>
<surname>Delgado-Sanchez</surname> <given-names>J. M.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Random forest model to predict solar water heating system performance</article-title>. <source>Renewable Energy</source> <volume>216</volume>, <fpage>119086</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.renene.2023.119086</pub-id>
</citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Mei</surname> <given-names>M.</given-names>
</name>
</person-group> (<year>2005</year>). <article-title>Classification of lychee cultivars with rapd analysis</article-title>. <source>Acta Hortic</source>. <volume>665</volume>, <fpage>149</fpage>&#x2013;<lpage>160</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.17660/ActaHortic.2005.665.17</pub-id>
</citation>
</ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>R.</given-names>
</name>
<name>
<surname>Hu</surname> <given-names>T.</given-names>
</name>
<name>
<surname>He</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>Z. S.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Nondestructive 3d phenotyping method of passion fruit based on x-ray micro-computed tomography and deep learning</article-title>. <source>Front. Plant Sci.</source> <volume>13</volume>, <elocation-id>1087904</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2022.1087904</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Menzel</surname> <given-names>C.</given-names>
</name>
</person-group> (<year>1985</year>). <article-title>Propagation of lychee: a review</article-title>. <source>Scientia Hortic.</source> <volume>25</volume>, <fpage>31</fpage>&#x2013;<lpage>48</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/0304-4238(85)90074-3</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mishra</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Chaudhury</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Sarkar</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Soin</surname> <given-names>A. S.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Ultrasound image segmentation: a deeply supervised network with attention to boundaries</article-title>. <source>IEEE Trans. Biomed. Eng.</source> <volume>66</volume>, <fpage>1637</fpage>&#x2013;<lpage>1648</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TBME.2018.2877577</pub-id>
</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mooney</surname> <given-names>S. J.</given-names>
</name>
<name>
<surname>Pridmore</surname> <given-names>T. P.</given-names>
</name>
<name>
<surname>Helliwell</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Bennett</surname> <given-names>M. J.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Developing X-ray computed tomography to non-invasively image 3-D root systems architecture in soil</article-title>. <source>Plant Soil</source> <volume>352</volume>, <fpage>1</fpage>&#x2013;<lpage>22</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s11104-011-1039-9</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Osako</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Yamane</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Lin</surname> <given-names>S.-Y.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>P.-A.</given-names>
</name>
<name>
<surname>Tao</surname> <given-names>R.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Cultivar discrimination of litchi fruit images using deep learning</article-title>. <source>Scientia Hortic.</source> <volume>269</volume>, <fpage>109360</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.scienta.2020.109360</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Riederer</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Arand</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Burghardt</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Riedel</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Schuster</surname> <given-names>A.-C.</given-names>
</name>
<etal/>
</person-group>. (<year>2015</year>). <article-title>Water loss from litchi (Litchi chinensis) and longan (Dimocarpus longan) fruits is biphasic and controlled by a complex pericarpal transpiration barrier</article-title>. <source>Planta</source> <volume>242</volume>, <fpage>1207</fpage>&#x2013;<lpage>1219</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s00425-015-2360-y</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="confproc">
<person-group person-group-type="author">
<name>
<surname>Ronneberger</surname> <given-names>O.</given-names>
</name>
<name>
<surname>Fischer</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Brox</surname> <given-names>T.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>U-Net: Convolutional Networks for Biomedical Image Segmentation</article-title>. In: <person-group person-group-type="editor">
<name>
<surname>Navab</surname> <given-names>N.</given-names>
</name>
</person-group>, editor. <conf-name>International Conference on Medical Image Computing and Computer-Assisted Intervention. Medical Image Computing and Computer-Assisted Intervention &#x2013; MICCAI 2015</conf-name>; <conf-date>2015 Nov 18</conf-date>; <conf-loc>Munich, Germany</conf-loc>. (<publisher-loc>Cham</publisher-loc>: <publisher-name>Springer</publisher-name>), p. <fpage>234</fpage>&#x2013;<lpage>41</lpage>.</citation>
</ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tran</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Kanzaki</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Triest</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Hormaza</surname> <given-names>I.</given-names>
</name>
<name>
<surname>Kuk</surname> <given-names>N. J.</given-names>
</name>
<name>
<surname>Ming</surname> <given-names>R.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Analysis of genetic diversity of lychee (Litchi chinensis Sonn.) and wild forest relatives in the Sapindaceae from Vietnam using microsatellites</article-title>. <source>Genet. Resour. Crop Evol.</source> <volume>66</volume>, <fpage>1653</fpage>&#x2013;<lpage>1669</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1007/s10722-019-00837-y</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wu</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Feng</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Duan</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Dai</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>X.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>A deep learning-integrated micro-CT image analysis pipeline for quantifying rice lodging resistance-related traits</article-title>. <source>Plant Commun.</source> <volume>2</volume> (<issue>2</issue>), <fpage>100165</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1016/j.xplc.2021.100165</pub-id>
</citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xie</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Peng</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Chen</surname> <given-names>B.</given-names>
</name>
<name>
<surname>Jing</surname> <given-names>T.</given-names>
</name>
<name>
<surname>Sun</surname> <given-names>D.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>Litchi detection in a complex natural environment using the YOLOv5-litchi model</article-title>. <source>Agronomy</source> <volume>12</volume>, <fpage>3054</fpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.3390/agronomy12123054</pub-id>
</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Yang</surname> <given-names>W.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>D.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>J.</given-names>
</name>
<name>
<surname>He</surname> <given-names>Q.</given-names>
</name>
<etal/>
</person-group>. (<year>2022</year>). <article-title>A non-destructive coconut fruit and seed traits extraction method based on micro-CT and DeepLabv3+ model</article-title>. <source>Front. Plant Sci.</source> <volume>13</volume>, <elocation-id>1069849</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fpls.2022.1069849</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Jin</surname> <given-names>D.</given-names>
</name>
<name>
<surname>An</surname> <given-names>X.</given-names>
</name>
<name>
<surname>Duan</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Duan</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Lian</surname> <given-names>F.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Lychee seed as a potential hypoglycemic agent, and exploration of its underlying mechanisms</article-title>. <source>Front. Pharmacol.</source> <volume>12</volume>, <elocation-id>737803</elocation-id>. doi:&#xa0;<pub-id pub-id-type="doi">10.3389/fphar.2021.737803</pub-id>
</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhou</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Siddiquee</surname> <given-names>M. M. R.</given-names>
</name>
<name>
<surname>Tajbakhsh</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Liang</surname> <given-names>J.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>UNet++: Redesigning skip connections to exploit multiscale features in image segmentation</article-title>. <source>IEEE Trans. Med. Imaging</source> <volume>39</volume>, <fpage>1856</fpage>&#x2013;<lpage>1867</lpage>. doi:&#xa0;<pub-id pub-id-type="doi">10.1109/TMI.2019.2959609</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>