<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Archiving and Interchange DTD v2.3 20070202//EN" "archivearticle.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="editorial" dtd-version="2.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Plant Sci.</journal-id>
<journal-title>Frontiers in Plant Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Plant Sci.</abbrev-journal-title>
<issn pub-type="epub">1664-462X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fpls.2022.1079022</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Plant Science</subject>
<subj-group>
<subject>Editorial</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Editorial: Innovative use of imaging techniques within plant science</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Egea</surname>
<given-names>Gregorio</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref> <uri xlink:href="https://loop.frontiersin.org/people/499698"/>
<xref ref-type="author-notes" rid="fn003">
<sup>&#x2020;</sup>
</xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Bucksch</surname>
<given-names>Alexander</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref> <uri xlink:href="https://loop.frontiersin.org/people/258213"/>
<xref ref-type="author-notes" rid="fn003">
<sup>&#x2020;</sup>
</xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Thygesen</surname>
<given-names>Lisbeth G.</given-names>
</name>
<xref ref-type="aff" rid="aff5">
<sup>5</sup>
</xref> <uri xlink:href="https://loop.frontiersin.org/people/692155"/>
<xref ref-type="author-notes" rid="fn003">
<sup>&#x2020;</sup>
</xref>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>Area of Agroforestry Engineering, School of Agricultural Engineering, University of Seville</institution>, <addr-line>Seville</addr-line>, <country>Spain</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Department of Plant Biology, University of Georgia</institution>, <addr-line>Athens, GA</addr-line>, <country>United States</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>Warnell School of Forestry and Natural Resources, University of Georgia</institution>, <addr-line>Athens, GA</addr-line>, <country>United States</country>
</aff>
<aff id="aff4">
<sup>4</sup>
<institution>Institute of Bioinformatics, University of Georgia</institution>, <addr-line>Athens, GA</addr-line>, <country>United States</country>
</aff>
<aff id="aff5">
<sup>5</sup>
<institution>Department of Geosciences and Natural Resource Management, University of Copenhagen</institution>, <addr-line>Frederiksberg</addr-line>, <country>Denmark</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited and Reviewed by: Roger Deal, Emory University, United States</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Gregorio Egea, <email xlink:href="mailto:gegea@us.es">gegea@us.es</email>
</p>
</fn>
<fn fn-type="other" id="fn003">
<p>&#x2020;ORCID: Gregorio Egea, <uri xlink:href="http://orcid.org/0000-0001-6285-0981">orcid.org/0000-0001-6285-0981</uri>; Alexander Bucksch, <uri xlink:href="http://orcid.org/0000-0002-1071-5355">orcid.org/0000-0002-1071-5355</uri>; Lisbeth G. Thygesen, <uri xlink:href="http://orcid.org/0000-0001-9685-7460">orcid.org/0000-0001-9685-7460</uri>
</p>
</fn>
<fn fn-type="other" id="fn002">
<p>This article was submitted to Technical Advances in Plant Science, a section of the journal Frontiers in Plant Science</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>22</day>
<month>11</month>
<year>2022</year>
</pub-date>
<pub-date pub-type="collection">
<year>2022</year>
</pub-date>
<volume>13</volume>
<elocation-id>1079022</elocation-id>
<history>
<date date-type="received">
<day>24</day>
<month>10</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>02</day>
<month>11</month>
<year>2022</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2022 Egea, Bucksch and Thygesen</copyright-statement>
<copyright-year>2022</copyright-year>
<copyright-holder>Egea, Bucksch and Thygesen</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<related-article id="RA1" related-article-type="commentary-article" xlink:href="https://www.frontiersin.org/research-topics/34975/Innovative-use-of-imaging-techniques-within-plant-science" ext-link-type="uri">
<bold>Editorial on the Research Topic</bold> <article-title>Innovative use of imaging techniques within plant science</article-title>
</related-article>
<kwd-group>
<kwd>imaging</kwd>
<kwd>non-destructive measurements</kwd>
<kwd>unmanned aerial vehicle (UAV)</kwd>
<kwd>microscopy</kwd>
<kwd>microspectroscopy</kwd>
<kwd>artificial intelligence</kwd>
</kwd-group>
<counts>
<fig-count count="0"/>
<table-count count="0"/>
<equation-count count="0"/>
<ref-count count="0"/>
<page-count count="3"/>
<word-count count="1213"/>
</counts>
</article-meta>
</front>
<body>
<sec id="s1" sec-type="intro">
<p>Several examples in the history of biology show how technological advances have facilitated fundamental discoveries in biology. The development and application of imaging techniques in plant sciences represent such an example that is currently unfolding. By using image analysis, spatially resolved information can be obtained that allows new questions in the field to be explored. Furthermore, when applied for example in crop monitoring, quality control or management, these techniques allow objective real-time decisions to be made, often based on non-destructive measurements and a reduction in time and labor that could also translate into cost savings.</p>
<p>This Research Topic brings together research papers that demonstrate how image-based techniques can help solve actual problems in the world of plant sciences. Generally, the presented papers offer image-based solutions to assess plant disease status, predict and detect grain and fruit yield, and analyze wood samples for their species and quality. These general application areas were achieved with a range of imaging instruments from the microscopy level to airborne image collection with unmanned aerial vehicles (UAV).</p>
<p><ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fpls.2021.686332">Zhang et&#xa0;al. (2021)</ext-link> tackle the long-standing and laborious yield prediction problem to precisely quantify yellowness in canola flowers. In doing so, they propose a UAV method to effectively estimate yield in canola (Brassica napus L.) from airborne imagery. Their remote-sensing solution is to define a normalized difference yellowness index (NDYI) that demonstrated high predictive performance for seed yield.</p>
<p>Using similar technology, <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fpls.2022.820585">Shi et&#xa0;al. (2022)</ext-link> propose the use of UAV-based multispectral imagery and machine learning (ML) models for aboveground biomass (AGB) and leaf area index (LAI) estimation of two intercropping species (mung bean and red bean) in tea plantations. Five ML algorithms were evaluated based on the vegetation indices derived from the UAV multispectral images as well as the actual AGB and LAI data. Their results show that two models (Support Vector Machine and Back Propagation Neural Network) outperformed the other ML models in predicting the AGB and LAI of red bean and mung bean.</p>
<p>Crop disease detection using image-based techniques is also a field that experiences growth due to its positive impact on crop productivity and on the environmental and economic sustainability of agriculture. In this sense, <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fpls.2022.1002627">Jiang et&#xa0;al. (2022)</ext-link> have conducted a study aimed at assessing the severity of wheat stripe rust using a low-cost approach based on evaluating images of infected leaves obtained with smartphones. This approach may represent a compromise between the sometimes-subjective visual disease assessment and symptoms assessment using costly devices such as multi- and hyper- spectral cameras. Along the same lines, <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fpls.2022.1010249">Leiva et&#xa0;al. (2022)</ext-link> compared the performance of two low-cost image-based methods for predicting Fusarium Head Blight (FHB) infection in winter wheat seeds. The two analysis methods use RGB images of wheat seeds to provide various morphological traits of the seed, which were used to predict FHB using multiple regression models.</p>
<p>The development of robots for automatic fruit harvesting is a growing discipline due to the increasing costs of manual harvesting and the difficulty of finding skilled labor. Accurate and robust detection of fruits under natural conditions is crucial for the success of automatic fruit harvesting with robots. In this line of work, <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fpls.2022.972445">Hou et&#xa0;al. (2022)</ext-link> have developed a methodology based on the use of binocular cameras and deep learning to improve both citrus fruit detection and 3D localization under natural lighting conditions in commercial citrus orchards. To this end, an improved version of the YOLO v5s model is proposed for citrus detection, Cr-Cb chromatic mapping together with Otsu threshold algorithm and morphology processing are used to extract citrus shape, and a geometric image model is used for 3D citrus localization. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fpls.2022.942875">Liu et&#xa0;al. (2022)</ext-link> present another work aimed at improving the automatic detection of fruits under natural conditions using deep learning models. In their case study, the authors have developed an anchor-free detector based on the CenterNet architecture that outperforms other tomato detection methods.</p>
<p>Another innovative application of the use of image-based techniques is that developed by <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fpls.2022.945291">Husaini et&#xa0;al. (2022)</ext-link> for the detection of fraudulent saffron. Saffron adulteration is a major problem, because saffron is an expensive spice that is normally used as hand-picked dried flower stigmas. As a technological advancement, the authors have successfully tested two new methods for detecting adulterated saffron, one based on the use of a low-cost optical microscope (Foldscope) in combination with a chemical staining technique for visual identification of fake saffron samples, and another based on deep learning to automatically classify images taken with Foldscope and a smartphone.</p>
<p><ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fpls.2021.792981">Berger et&#xa0;al. (2021)</ext-link> report on a study in which image data obtained using darkfield and fluorescence microscopy was used to quantify the histology in cross sections of whole maize stems. This information was used for phenotyping different maize lines. The method developed makes it possible to assess unusually large cross sections, i.e., in the cm range. It is possible to quantify plant anatomy and autofluorescence after excitation with ultraviolet and/or visible light.</p>
<p>Determining the wood species or genus of timber and wooden artefacts based on light microscopy is important when controlling wood trade, especially to protect endangered tree species. However, wood identification is a skill that requires training and expertise, which means that far less wood is controlled than one could wish for from a conservation viewpoint. Adding to the challenge is the limited availability of microscopy images from known species in species-rich forests. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fpls.2021.760139">Lopes et&#xa0;al. (2022)</ext-link> describe an exciting first step towards addressing this problem. Their approach involves neural networks to generate artificial images based on microscopy images of known species. In a second step, the method increases the number of images available per species to train neural networks to be able to identify the wood species in microscopy images of unknown species.</p>
<p>The article by <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fpls.2022.986578">Ponzecchi et&#xa0;al. (2022)</ext-link> describes a study where chemically modified wood was studied using Raman micro-spectroscopy. The novelty of this article lies in the development and test of a miniature climate chamber that makes it possible to adjust the relative humidity of microtomed sample sections mounted below a normal coverslip while they are presented to the instrument. In addition to the advantage of securing a well-defined and adjustable relative humidity, the setup has the advantage of being compatible with immersion objectives.</p>
<p>Together, the articles of this Research Topic illustrate the many useful applications that are currently being explored within this active field of research and development.</p>
</sec>
<sec id="s2" sec-type="author-contributions">
<title>Author contributions</title>
<p>All authors listed above have made substantial contributions to the work. All authors have approved the final manuscript for publication.</p>
</sec>
<sec id="s3" sec-type="acknowledgment">
<title>Acknowledgments</title>
<p>We thank all authors and reviewers for their contributions to this Research Topic and for the support of the editorial office.</p>
</sec>
<sec id="s4" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s5" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
</body>
</article>