<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xml:lang="EN" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Microbiol.</journal-id>
<journal-title>Frontiers in Microbiology</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Microbiol.</abbrev-journal-title>
<issn pub-type="epub">1664-302X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fmicb.2023.1084312</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Microbiology</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>EMDS-7: Environmental microorganism image dataset seventh version for multiple object detection evaluation</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Yang</surname> <given-names>Hechen</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/2160351/overview"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Li</surname> <given-names>Chen</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1781193/overview"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Zhao</surname> <given-names>Xin</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c002"><sup>&#x0002A;</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1554722/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Cai</surname> <given-names>Bencheng</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Zhang</surname> <given-names>Jiawei</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Ma</surname> <given-names>Pingli</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Zhao</surname> <given-names>Peng</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1912767/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Chen</surname> <given-names>Ao</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Jiang</surname> <given-names>Tao</given-names></name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Sun</surname> <given-names>Hongzan</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
</contrib>
<contrib contrib-type="author">
<name><surname>Teng</surname> <given-names>Yueyang</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/2075870/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Qi</surname> <given-names>Shouliang</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/283101/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Huang</surname> <given-names>Xinyu</given-names></name>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/2104741/overview"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Grzegorzek</surname> <given-names>Marcin</given-names></name>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
<xref ref-type="aff" rid="aff7"><sup>7</sup></xref>
<uri xlink:href="http://loop.frontiersin.org/people/1777999/overview"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Microscopic Image and Medical Image Analysis Group, College of Medicine and Biological Information Engineering, Northeastern University</institution>, <addr-line>Shenyang</addr-line>, <country>China</country></aff>
<aff id="aff2"><sup>2</sup><institution>School of Resources and Civil Engineering, Northeastern University</institution>, <addr-line>Shenyang</addr-line>, <country>China</country></aff>
<aff id="aff3"><sup>3</sup><institution>School of Intelligent Medicine, Chengdu University of Traditional Chinese Medicine</institution>, <addr-line>Chengdu</addr-line>, <country>China</country></aff>
<aff id="aff4"><sup>4</sup><institution>International Joint Institute of Robotics and Intelligent Systems, Chengdu University of Information Technology</institution>, <addr-line>Chengdu</addr-line>, <country>China</country></aff>
<aff id="aff5"><sup>5</sup><institution>Shengjing Hospital, China Medical University</institution>, <addr-line>Shenyang</addr-line>, <country>China</country></aff>
<aff id="aff6"><sup>6</sup><institution>Institute of Medical Informatics, University of L&#x000FC;beck</institution>, <addr-line>L&#x000FC;beck</addr-line>, <country>Germany</country></aff>
<aff id="aff7"><sup>7</sup><institution>Department of Knowledge Engineering, University of Economics in Katowice</institution>, <addr-line>Katowice</addr-line>, <country>Poland</country></aff>
<author-notes>
<fn fn-type="edited-by"><p>Edited by: Chao Jiang, Zhejiang University, China</p></fn>
<fn fn-type="edited-by"><p>Reviewed by: Xin Zhou, Stanford University, United States; Shuai Liu, Hunan Normal University, China</p></fn>
<corresp id="c001">&#x0002A;Correspondence: Chen Li &#x02709; <email>lichen&#x00040;bmie.neu.edu.cn</email></corresp>
<corresp id="c002">Xin Zhao &#x02709; <email>zhaoxin&#x00040;mail.neu.edu.cn</email></corresp>
<fn fn-type="other" id="fn001"><p>This article was submitted to Systems Microbiology, a section of the journal Frontiers in Microbiology</p></fn></author-notes>
<pub-date pub-type="epub">
<day>20</day>
<month>02</month>
<year>2023</year>
</pub-date>
<pub-date pub-type="collection">
<year>2023</year>
</pub-date>
<volume>14</volume>
<elocation-id>1084312</elocation-id>
<history>
<date date-type="received">
<day>30</day>
<month>10</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>30</day>
<month>01</month>
<year>2023</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x000A9; 2023 Yang, Li, Zhao, Cai, Zhang, Ma, Zhao, Chen, Jiang, Sun, Teng, Qi, Huang and Grzegorzek.</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Yang, Li, Zhao, Cai, Zhang, Ma, Zhao, Chen, Jiang, Sun, Teng, Qi, Huang and Grzegorzek</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p></license> </permissions>
<abstract>
<p>Nowadays, the detection of environmental microorganism indicators is essential for us to assess the degree of pollution, but the traditional detection methods consume a lot of manpower and material resources. Therefore, it is necessary for us to make microbial data sets to be used in artificial intelligence. The Environmental Microorganism Image Dataset Seventh Version (EMDS-7) is a microscopic image data set that is applied in the field of multi-object detection of artificial intelligence. This method reduces the chemicals, manpower and equipment used in the process of detecting microorganisms. EMDS-7 includes the original <italic>Environmental Microorganism (EM) images</italic> and the corresponding object labeling files in &#x0201C;.XML&#x0201D; format. The EMDS-7 data set consists of 41 types of EMs, with a total of 2,365 images and 13,216 labeled objects. The EMDS-7 database mainly focuses on object detection. In order to prove the effectiveness of EMDS-7, we select the most commonly used deep learning methods (<italic>Faster-Region Convolutional Neural Network</italic> (Faster-RCNN), YOLOv3, YOLOv4, SSD, and RetinaNet) and evaluation indices for testing and evaluation. EMDS-7 is freely published for non-commercial purpose at: <ext-link ext-link-type="uri" xlink:href="https://figshare.com/articles/dataset/EMDS-7_DataSet/16869571">https://figshare.com/articles/dataset/EMDS-7_DataSet/16869571</ext-link>.</p></abstract>
<kwd-group>
<kwd>environmental microorganism</kwd>
<kwd>image dataset construction</kwd>
<kwd>image analysis</kwd>
<kwd>multiple object detection</kwd>
<kwd>deep learning</kwd>
</kwd-group>
<contract-sponsor id="cn001">National Natural Science Foundation of China<named-content content-type="fundref-id">10.13039/501100001809</named-content></contract-sponsor>
<counts>
<fig-count count="4"/>
<table-count count="3"/>
<equation-count count="0"/>
<ref-count count="52"/>
<page-count count="11"/>
<word-count count="6908"/>
</counts>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="s1">
<title>1. Introduction</title>
<sec>
<title>1.1. Environmental microorganisms</title>
<p>Today, Environmental Microorganisms (EMs) are inseparable from our lives (Li et al., <xref ref-type="bibr" rid="B16">2016</xref>). Some EMs are conducive to the development of ecology and promote the progress of human civilization (Zhang et al., <xref ref-type="bibr" rid="B47">2022b</xref>). However, some EMs hinder ecological balance and even cause urban water pollution to affect human health (Anand et al., <xref ref-type="bibr" rid="B2">2021</xref>). For example, <italic>Oscillatoria</italic> is a common EM, which can be observed in various freshwater environments and can thrive in various environments. When it reproduces vigorously, it will produce unpleasant odors, cause water pollution, consume oxygen in the water, and cause fish and shrimp to die of hypoxia (Lu et al., <xref ref-type="bibr" rid="B28">2021</xref>). In addition, <italic>Scenedesmus</italic> is also a freshwater planktonic algae microorganism, which is usually composed of four to eight cells. <italic>Scenedesmus</italic> has strong resistance to organic pollutants, and plays a vital role in water self-purification and sewage purification (Kashyap et al., <xref ref-type="bibr" rid="B12">2021</xref>). The images of the EMs proposed above are shown in <xref ref-type="fig" rid="F1">Figure 1</xref>. But researchers using traditional methods of identifying and analyzing microorganisms will consume a lot of manpower and material resources (Ji et al., <xref ref-type="bibr" rid="B11">2021</xref>). Computer vision analysis method is of great significance, it can help researchers to analyze EMs with higher precision and more comprehensive indicators (Kulwa et al., <xref ref-type="bibr" rid="B15">2022</xref>).</p>
<fig id="F1" position="float">
<label>Figure 1</label>
<caption><p>An example of EM images.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmicb-14-1084312-g0001.tif"/>
</fig>
</sec>
<sec>
<title>1.2. Research background</title>
<p>Automated microscopes and intelligent microscopes are currently a major trend in development, and everyone is working to develop faster, more accurate and objective hardware and software (Abdulhay et al., <xref ref-type="bibr" rid="B1">2018</xref>). Compared with manual identification and observation methods, computer-aided detection methods are more objective, accurate and convenient. With the rapid development of computer vision and deep learning technologies, computer-aided image analysis has been widely used in many research areas, including histopathology image analysis (Chen et al., <xref ref-type="bibr" rid="B5">2022a</xref>,<xref ref-type="bibr" rid="B6">b</xref>; Hu et al., <xref ref-type="bibr" rid="B9">2022a</xref>,<xref ref-type="bibr" rid="B10">b</xref>; Li et al., <xref ref-type="bibr" rid="B19">2022</xref>), cytopathology image analysis (Rahaman et al., <xref ref-type="bibr" rid="B31">2020a</xref>, <xref ref-type="bibr" rid="B33">2021</xref>; Liu et al., <xref ref-type="bibr" rid="B24">2022a</xref>,<xref ref-type="bibr" rid="B25">b</xref>), object detection (Chen A. et al., <xref ref-type="bibr" rid="B4">2022</xref>; Ma et al., <xref ref-type="bibr" rid="B29">2022</xref>; Zou et al., <xref ref-type="bibr" rid="B52">2022</xref>), microorganism classification (Yang et al., <xref ref-type="bibr" rid="B42">2022</xref>; Zhang et al., <xref ref-type="bibr" rid="B46">2022a</xref>; Zhao et al., <xref ref-type="bibr" rid="B50">2022a</xref>), microorganism segmentation (Zhang et al., <xref ref-type="bibr" rid="B44">2020</xref>, <xref ref-type="bibr" rid="B43">2021a</xref>; Kulwa et al., <xref ref-type="bibr" rid="B14">2023</xref>), and microorganism counting (Zhang et al., <xref ref-type="bibr" rid="B45">2021b</xref>, <xref ref-type="bibr" rid="B48">2022c</xref>,<xref ref-type="bibr" rid="B49">d</xref>).
In addition, with the advancement of computer hardware and the rapid development of computer-aided detection methods, the results obtained by computer-aided detection methods in EM testing have been improving. EMs play a very important role in the whole ecosystem. Because of their small size, invisibility to the naked eye, and unknown nature, studying EMs has always been a challenge for humans (Ma et al., <xref ref-type="bibr" rid="B29">2022</xref>). Generally, there are four traditional methods of detecting EMs. The first is the physical method, which has a high degree of accuracy, but uses very expensive equipment and a time-consuming analytical process (Yamaguchi et al., <xref ref-type="bibr" rid="B41">2015</xref>). The second method is the chemical method, which has a high identification capacity, but is often affected by environmental contamination from chemical agents. The third is the molecular biological method, which detects the genes of microorganisms and analyzes them. The accuracy of this method is very high, but at the same time it consumes a lot of human and material resources (Kosov et al., <xref ref-type="bibr" rid="B13">2018</xref>). The fourth is the morphological method, which requires a skilled researcher to observe the shape of EM under a microscope, which is inefficient and time consuming (Li et al., <xref ref-type="bibr" rid="B18">2019</xref>). These four traditional methods of analyzing EM have their own advantages and disadvantages. As deep learning has been widely used in machine vision analysis in recent years (Shen et al., <xref ref-type="bibr" rid="B37">2017</xref>), it can compensate for the shortcomings of the four traditional methods while retaining accuracy (Kulwa et al., <xref ref-type="bibr" rid="B14">2023</xref>). However, there are few public EM databases available to researchers today. This hinders the analysis of EM. 
In recent years, with the advent of the EMDS series of datasets, research on artificial intelligence on EM has been carried out. With the update of EMDS-1 to EMDS-6, environmental microbial images are widely used for classification, segmentation, and retrieval (Zhao et al., <xref ref-type="bibr" rid="B51">2022b</xref>). However, there is still a gap for multi-objective EM detection, and EMDS-7 fills this gap as a multi-objective EM dataset. The EMDS details are listed in <xref ref-type="table" rid="T3">Table 3</xref>.</p>
</sec>
<sec>
<title>1.3. EM image processing and analysis</title>
<p>Image analysis is the combination of mathematical models and image processing techniques to analyze and extract certain intelligence information (Song et al., <xref ref-type="bibr" rid="B38">2022</xref>). Image processing refers to the use of computers to analyze images. Common image processing includes image denoising, image segmentation, and feature extraction (Liu et al., <xref ref-type="bibr" rid="B27">2022d</xref>). Image noise appears in the process of acquiring and transmitting EMs images (Gonzalez and Woods, <xref ref-type="bibr" rid="B8">2002</xref>). Image denoising can reduce the noise of the EM image while preserving the details of the image (Rahaman et al., <xref ref-type="bibr" rid="B32">2020b</xref>). Besides, in the process of deep learning based EMs image analyzation, we can extract the features of EM images, then send them to the deep learning network model for training, and match them with known data to classify, retrieve and detect EMs (Liu et al., <xref ref-type="bibr" rid="B26">2022c</xref>). In addition, EM images can also be applied in the field of image segmentation to separate microorganisms from the complex background of the image (Pal and Pal, <xref ref-type="bibr" rid="B30">1993</xref>). Meanwhile, EM images can be used in the field of EM object detection. First, we can frame and mark the known EM objects in the original image, and then transfer the image to the object detection model for feature extraction and network training (Zhang et al., <xref ref-type="bibr" rid="B45">2021b</xref>). Finally, the trained model can be applied for object detection of EMs.</p>
</sec>
<sec>
<title>1.4. Contribution</title>
<p>EMs are one of the important indicators for investigating the environment, so it is very essential to collect EM data and information (Kosov et al., <xref ref-type="bibr" rid="B13">2018</xref>). The images in the Environmental Microorganism Image Dataset Seventh Version (EMDS-7) are all taken from urban areas, so they can be used to monitor the pollution of the urban water environment. Furthermore, due to the constant changes in conditions such as temperature and humidity, EMs are very sensitive to these conditions, so the biomass of EMs is easily affected (Rodriguez et al., <xref ref-type="bibr" rid="B36">2020</xref>). It is difficult to collect enough EM images. Currently, there are some EM data sets, but many of them are not open source. EMDS-7 is provided to researchers as an open source data set. In addition, we prepare high-quality corresponding object label files of EMDS-7 for algorithms and model evaluation. The label files of EMs can be directly used in multiple object detection and analysis. EMDS-7 has a variety of EM images, which provides sufficient data support for EM object detection and achieves satisfactory detection results. Researchers can apply many artificial intelligence methods instead of traditional analysis methods to analyze microorganisms in EMDS-7.</p>
<p>The main contributions of this paper are as follows.</p>
<list list-type="simple">
<list-item><p>(1) EMDS-7 is available to researchers as an open source dataset that helps to analyze microbial images.</p></list-item>
<list-item><p>(2) High quality corresponding object label files of EMDS-7 for algorithm and model evaluation. Label files of EMs can be directly used for detection and analysis of multiple objects.</p></list-item>
<list-item><p>(3) Performance analysis of multiple object detection models on EMDS-7 is provided, which facilitates further ensemble learning.</p></list-item>
</list>
</sec>
</sec>
<sec id="s2">
<title>2. Dataset information of EMDS-7</title>
<p>EMDS-7 consists of 2,365 images of 42 EM categories and 13,216 labeled objects. The EM sampling sources are images taken from different lakes and rivers in Shenyang (Northeast China) by two environmental biologists (Northeastern University, China) under a 400 &#x000D7; optical microscope from 2018 to 2019. Then, four bioinformatics scientists (Northeastern University, China) manually prepared the object labeling files in &#x0201C;.XML&#x0201D; format corresponding to the original 2,365 images from 2020 to 2021. In the EM object labeling files, 41 types of EMs are labeled by their categories. In addition, the unknown EMs and impurities are marked as Unknown, and a total of 13,216 labeled objects are obtained. In <xref ref-type="table" rid="T1">Table 1</xref> we list the 42 categories of EMs included in EMDS-7. Also, for a more visual presentation of our dataset, we list in the table detailed information about each microorganism category, such as the number of original images of each microorganism category, the total number of annotations of each microorganism category and the visible characteristics of each microorganism category. <xref ref-type="fig" rid="F2">Figure 2</xref> shows examples of 41 types of EMs and unknown objects in EMDS-7. The labeled files of EMDS-7 images are manually labeled based on the following two rules:</p>
<table-wrap position="float" id="T1">
<label>Table 1</label>
<caption><p>Basic information of 42 EM classes in EMDS-7.</p></caption>
<table frame="box" rules="all">
<thead>
<tr style="background-color:&#x00023;919498;color:&#x00023;ffffff">
<th valign="top" align="left"><bold>Classes</bold></th>
<th valign="top" align="center"><bold>NoOI</bold></th>
<th valign="top" align="center"><bold>NEMo</bold></th>
<th valign="top" align="left"><bold>VC</bold></th>
<th valign="top" align="left"><bold>Classes</bold></th>
<th valign="top" align="center"><bold>NoOI</bold></th>
<th valign="top" align="center"><bold>NEMo</bold></th>
<th valign="top" align="left"><bold>VC</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left"><italic>Oscillatoria</italic></td>
<td valign="top" align="center">41</td>
<td valign="top" align="center">178</td>
<td valign="top" align="left">Cylindrical</td>
<td valign="top" align="left"><italic>Staurastrum</italic></td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">9</td>
<td valign="top" align="left">Multi-radial symmetry</td>
</tr> <tr>
<td valign="top" align="left"><italic>Ankistrodesmus</italic></td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">45</td>
<td valign="top" align="left">Cell needle</td>
<td valign="top" align="left"><italic>Phormidium</italic></td>
<td valign="top" align="center">276</td>
<td valign="top" align="center">1,216</td>
<td valign="top" align="left">Plant body gelatinous or leathery</td>
</tr> <tr>
<td valign="top" align="left"><italic>Microcystis</italic></td>
<td valign="top" align="center">307</td>
<td valign="top" align="center">826</td>
<td valign="top" align="left">Spherical masses</td>
<td valign="top" align="left"><italic>Fragilaria</italic></td>
<td valign="top" align="center">55</td>
<td valign="top" align="center">59</td>
<td valign="top" align="left">Lanceolate</td>
</tr> <tr>
<td valign="top" align="left"><italic>Gomphonema</italic></td>
<td valign="top" align="center">87</td>
<td valign="top" align="center">108</td>
<td valign="top" align="left">Linear-lanceolate</td>
<td valign="top" align="left"><italic>Anabaenopsis</italic></td>
<td valign="top" align="center">22</td>
<td valign="top" align="center">37</td>
<td valign="top" align="left">Filaments</td>
</tr> <tr>
<td valign="top" align="left"><italic>Sphaerocystis</italic></td>
<td valign="top" align="center">55</td>
<td valign="top" align="center">53</td>
<td valign="top" align="left">Cell sphere</td>
<td valign="top" align="left"><italic>Coelosphaerium</italic></td>
<td valign="top" align="center">77</td>
<td valign="top" align="center">165</td>
<td valign="top" align="left">Group glue is generous and transparent</td>
</tr> <tr>
<td valign="top" align="left"><italic>Cosmarium</italic></td>
<td valign="top" align="center">17</td>
<td valign="top" align="center">28</td>
<td valign="top" align="left">cell side flattened</td>
<td valign="top" align="left"><italic>Crucigenia</italic></td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">16</td>
<td valign="top" align="left">Micelles with many cells</td>
</tr> <tr>
<td valign="top" align="left"><italic>Cocconeis</italic></td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">15</td>
<td valign="top" align="left">Flat, oval cells</td>
<td valign="top" align="left"><italic>Achnanthes</italic></td>
<td valign="top" align="center">18</td>
<td valign="top" align="center">19</td>
<td valign="top" align="left">Shell surface linear lanceolate</td>
</tr> <tr>
<td valign="top" align="left"><italic>Tribonema</italic></td>
<td valign="top" align="center">49</td>
<td valign="top" align="center">88</td>
<td valign="top" align="left">Yellow-green cotton-like</td>
<td valign="top" align="left"><italic>Synedra</italic></td>
<td valign="top" align="center">77</td>
<td valign="top" align="center">206</td>
<td valign="top" align="left">Shell needle</td>
</tr> <tr>
<td valign="top" align="left"><italic>Chlorella</italic></td>
<td valign="top" align="center">80</td>
<td valign="top" align="center">155</td>
<td valign="top" align="left">Small round or slightly oval</td>
<td valign="top" align="left"><italic>Ceratium</italic></td>
<td valign="top" align="center">23</td>
<td valign="top" align="center">24</td>
<td valign="top" align="left">Flat back and abdomen</td>
</tr> <tr>
<td valign="top" align="left"><italic>Tetraedron</italic></td>
<td valign="top" align="center">25</td>
<td valign="top" align="center">66</td>
<td valign="top" align="left">Flat or pyramidal</td>
<td valign="top" align="left"><italic>Pompholyx</italic></td>
<td valign="top" align="center">49</td>
<td valign="top" align="center">51</td>
<td valign="top" align="left">Carapace Oval or Shield Shape</td>
</tr> <tr>
<td valign="top" align="left"><italic>Ankistrodesmus</italic></td>
<td valign="top" align="center">64</td>
<td valign="top" align="center">84</td>
<td valign="top" align="left">Needle to spindle</td>
<td valign="top" align="left"><italic>Merismopedia</italic></td>
<td valign="top" align="center">33</td>
<td valign="top" align="center">38</td>
<td valign="top" align="left">Flat group</td>
</tr> <tr>
<td valign="top" align="left"><italic>Brachionus</italic></td>
<td valign="top" align="center">113</td>
<td valign="top" align="center">144</td>
<td valign="top" align="left">quilt is wider and square</td>
<td valign="top" align="left"><italic>Spirogyra</italic></td>
<td valign="top" align="center">89</td>
<td valign="top" align="center">134</td>
<td valign="top" align="left">Strip, spiral</td>
</tr> <tr>
<td valign="top" align="left"><italic>Chaenea</italic></td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">13</td>
<td valign="top" align="left">Cylindrical or spindle</td>
<td valign="top" align="left"><italic>Coelastrum</italic></td>
<td valign="top" align="center">29</td>
<td valign="top" align="center">30</td>
<td valign="top" align="left">Spherical, oval or truncated pyramid</td>
</tr> <tr>
<td valign="top" align="left"><italic>Pediastrum</italic></td>
<td valign="top" align="center">95</td>
<td valign="top" align="center">105</td>
<td valign="top" align="left">Disc or star</td>
<td valign="top" align="left"><italic>Raphidiopsis</italic></td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">19</td>
<td valign="top" align="left">Curved</td>
</tr> <tr>
<td valign="top" align="left"><italic>Spirulina</italic></td>
<td valign="top" align="center">18</td>
<td valign="top" align="center">73</td>
<td valign="top" align="left">Spiral</td>
<td valign="top" align="left"><italic>Gomphosphaeria</italic></td>
<td valign="top" align="center">58</td>
<td valign="top" align="center">79</td>
<td valign="top" align="left">Oval tiny groups</td>
</tr> <tr>
<td valign="top" align="left"><italic>Actinastrum</italic></td>
<td valign="top" align="center">23</td>
<td valign="top" align="center">181</td>
<td valign="top" align="left">Wide round or pointed</td>
<td valign="top" align="left"><italic>Euglena</italic></td>
<td valign="top" align="center">81</td>
<td valign="top" align="center">81</td>
<td valign="top" align="left">Spindle to needle</td>
</tr> <tr>
<td valign="top" align="left"><italic>Navicula</italic></td>
<td valign="top" align="center">75</td>
<td valign="top" align="center">90</td>
<td valign="top" align="left">Shell surface fusiform or oval</td>
<td valign="top" align="left"><italic>Euchlanis</italic></td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">13</td>
<td valign="top" align="left">Oval or pear-shaped</td>
</tr> <tr>
<td valign="top" align="left"><italic>Scenedesmus</italic></td>
<td valign="top" align="center">86</td>
<td valign="top" align="center">139</td>
<td valign="top" align="left">Oval or spindle</td>
<td valign="top" align="left"><italic>Keratella</italic></td>
<td valign="top" align="center">65</td>
<td valign="top" align="center">69</td>
<td valign="top" align="left">Irregular spines</td>
</tr> <tr>
<td valign="top" align="left"><italic>Golenkinia</italic></td>
<td valign="top" align="center">60</td>
<td valign="top" align="center">279</td>
<td valign="top" align="left">Irregularly slender bristles</td>
<td valign="top" align="left"><italic>diversicornis</italic></td>
<td valign="top" align="center">89</td>
<td valign="top" align="center">99</td>
<td valign="top" align="left">Feet have toes and quilt</td>
</tr> <tr>
<td valign="top" align="left"><italic>Pinnularia</italic></td>
<td valign="top" align="center">36</td>
<td valign="top" align="center">38</td>
<td valign="top" align="left">Oval to boat</td>
<td valign="top" align="left"><italic>Surirella</italic></td>
<td valign="top" align="center">22</td>
<td valign="top" align="center">37</td>
<td valign="top" align="left">False shell seam</td>
</tr> <tr>
<td valign="top" align="left"><italic>unknown</italic></td>
<td/>
<td valign="top" align="center">8088</td>
<td valign="top" align="left">Unknown EM and impurities</td>
<td valign="top" align="left"><italic>Characium</italic></td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">19</td>
<td valign="top" align="left">Spindle</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>NoOI, Number of original images; NEMo, Number of EM object; VC, Visible characteristics.</p>
</table-wrap-foot>
</table-wrap>
<fig id="F2" position="float">
<label>Figure 2</label>
<caption><p>An example of EM images in EMDS-7 (The red boxes are labeled EM objects).</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmicb-14-1084312-g0002.tif"/>
</fig>
<p>Rule-A: All identifiable EMs that appear completely or show more than 60% of their own bodies in the images are marked with the category labels corresponding to the 41 categories.</p>
<p>Rule-B: Unknown EMs, EMs that show &#x0003C; 40% of their own bodies in the images, and EMs other than the 41 categories in this database are marked as unknown. In addition, some obvious impurities in the background of the image are also marked as unknown.</p>
<p>EMDS-7 is freely published for non-commercial purpose at: <ext-link ext-link-type="uri" xlink:href="https://figshare.com/articles/dataset/EMDS-7_DataSet/16869571">https://figshare.com/articles/dataset/EMDS-7_DataSet/16869571</ext-link>.</p>
</sec>
<sec id="s3">
<title>3. Object detection methods for EMDS-7 evaluation in this paper</title>
<p>In this paper, five object detection models are selected to demonstrate the effectiveness of EMDS-7. The five models include one-stage detection models and two-stage detection models. Among them, the one-stage object detection algorithms, which are characterized by one-step detection, only need to feed the network once to predict all the bounding boxes; this type of algorithm has relatively low accuracy, but is relatively fast and very suitable for mobile devices. We choose YOLOv3, YOLOv4, SSD, and RetinaNet as one-stage object detection models in this paper. In contrast, two-stage models first generate region proposals (regions that may contain objects) and then classify each region proposal. This type of algorithm is relatively accurate, but relatively slow, because it requires multiple runs of the detection and classification process. We choose <italic>Faster-Region Convolutional Neural Network</italic> (Faster-RCNN) as a two-stage object detection model in this paper. Finally, we analyze the variability of different deep learning networks in EMDS-7 from the above two directions.</p>
<sec>
<title>3.1. YOLOv3</title>
<p>The object detection model of the YOLO series is a one-stage detection network, which can locate and classify the objects at the same time. The advantage is that the training speed is fast with less time consumption. One of the most representative types is YOLOv3. Joseph Redmon and others used the new basic network darknet-53 in the backbone of YOLOv3 for feature extraction (Redmon et al., <xref ref-type="bibr" rid="B34">2016</xref>). It contains 53 convolutional layers and introduces a residual structure so that the network can reach a deep level while avoiding the problem of gradient disappearance. In addition, darknet-53 removes the pooling layer and uses a convolutional layer with a step size of 2 to reduce the dimensionality of the feature map, which can maintain the information transmission better. And YOLOv3 also has excellent structures such as anchor and FPN (Lin et al., <xref ref-type="bibr" rid="B21">2017a</xref>).</p>
</sec>
<sec>
<title>3.2. YOLOv4</title>
<p>YOLOv4 is an improved version based on YOLOv3, which adds CSP and PAN structures (Bochkovskiy et al., <xref ref-type="bibr" rid="B3">2020</xref>). The backbone network of YOLOv3 is modified to CSPDarknet53, and an SPP (spatial pyramid pooling) module is added behind the backbone network to expand the receptive field, using 1 &#x000D7; 1, 5 &#x000D7; 5, 9 &#x000D7; 9, 13 &#x000D7; 13 as the maximum pooling kernels for multi-scale fusion, which improves the accuracy of the model. At the same time, in the neck network of YOLOv4, there are Feature Pyramid Network (FPN) (Lin et al., <xref ref-type="bibr" rid="B21">2017a</xref>), Path Aggregation Network (PAN), BiFPN, and NAS-FPN, which can collect different feature maps more effectively.</p>
</sec>
<sec>
<title>3.3. SSD</title>
<p>SSD is another striking object detection network after YOLO. SSD has two major advantages (Liu et al., <xref ref-type="bibr" rid="B23">2016</xref>). First, SSD extracts feature maps of different scales for detection. Large-scale feature maps can be used to detect small objects, while small-scale feature maps can be used to detect large objects. Second, SSD uses different Prior boxes (Prior boxes, Default boxes, Anchors) for scale and aspect ratio. It follows the method of direct regression box and classification probability in YOLO, and uses anchors to improve recognition accuracy referring to Faster R-CNN. By combining these two networks, SSD balances the advantages and disadvantages of Faster R-CNN and YOLO.</p>
</sec>
<sec>
<title>3.4. RetinaNet</title>
<p>The RetinaNet object detection model is also a one-stage object detection network. RetinaNet essentially consists of a backbone network (BackBone) and two subnets (SubNet) (Lin et al., <xref ref-type="bibr" rid="B22">2017b</xref>). The backbone network is responsible for calculating the convolution feature map on the entire input image, which is composed of the ResNet residual network and the FPN feature pyramid network. The two sub-networks use the features extracted from the backbone network to achieve their respective functions (Lin et al., <xref ref-type="bibr" rid="B21">2017a</xref>). The first sub-network completes the classification task; the second sub-network completes the bounding box regression task.</p>
</sec>
<sec>
<title>3.5. Faster RCNN</title>
<p>Faster RCNN generates candidate frames based on the Anchor mechanism by adding a region proposal network (RPN), and finally integrates feature extraction, candidate frame selection, frame regression, and classification into one network, thereby the detection accuracy and efficiency can be effectively improved (Ren et al., <xref ref-type="bibr" rid="B35">2015</xref>). Faster RCNN performs classification and detection of foreground and background in the RPN network structure, optimizes the complexity of picking samples, makes positive and negative samples become more balanced, and then focuses on some parameters for classification training. For the first stage of object detection, it has to do both localization and classification, and there is no clear division of labor which part is dedicated to classification and which part is dedicated to regression of prediction frames, so that the learning difficulty increases for each parameter. Therefore, the classification training of the second stage will be much easier than the first-stage object detection, which does mixed classification and prediction frame regression directly.</p>
</sec>
</sec>
<sec id="s4">
<title>4. Evaluation of deep learning object detection methods</title>
<p>Object detection is an important part of image analysis (Sun et al., <xref ref-type="bibr" rid="B39">2020</xref>). To prove the effectiveness of EMDS-7 in object detection and evaluation, we use five different deep learning object detection models to detect EMs in the EMDS-7 data set. The five models are YOLOv3 (Redmon et al., <xref ref-type="bibr" rid="B34">2016</xref>), YOLOv4 (Bochkovskiy et al., <xref ref-type="bibr" rid="B3">2020</xref>), SSD (Liu et al., <xref ref-type="bibr" rid="B23">2016</xref>), RetinaNet (Lin et al., <xref ref-type="bibr" rid="B22">2017b</xref>), and Faster RCNN (Ren et al., <xref ref-type="bibr" rid="B35">2015</xref>). Because the number of images of each category of EM in the EMDS-7 data set is different, we divide each category of the EMs data set into the training, validation and test set according to 6:2:2 to ensure each set has 42 types of EMs (Chen et al., <xref ref-type="bibr" rid="B7">2021</xref>). We train EMDS-7 for five different kinds of deep learning object detection model, and then, respectively, predict the images of the test set (Wang et al., <xref ref-type="bibr" rid="B40">2022</xref>). We calculate the number of EM objects in 456 EM images. We set the threshold of the prediction frame confidence to 0.5: when the model predicts the object&#x00027;s confidence is &#x0003E;0.5, the prediction box is displayed, and Intersection over Union (IOU) is set to 0.3. In <xref ref-type="fig" rid="F3">Figure 3</xref>, we summarize the Average Precision (AP) value of each class of EMs. Analysis of prediction results is shown in <xref ref-type="table" rid="T2">Table 2</xref>. We also illustrate the location of the prediction box in the EM images, and some samples are shown in <xref ref-type="fig" rid="F4">Figure 4</xref>.</p>
<fig id="F3" position="float">
<label>Figure 3</label>
<caption><p>Each category of EM object detection prediction AP value in EMDS-7.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmicb-14-1084312-g0003.tif"/>
</fig>
<table-wrap position="float" id="T2">
<label>Table 2</label>
<caption><p>A comparison of the object detection results on test set of EMs.</p></caption>
<table frame="box" rules="all">
<thead>
<tr style="background-color:&#x00023;919498;color:&#x00023;ffffff">
<th/>
<th/>
<th valign="top" align="center" colspan="2"><bold>YOLOv3</bold></th>
<th valign="top" align="center" colspan="2"><bold>YOLOv4</bold></th>
<th valign="top" align="center" colspan="2"><bold>SSD</bold></th>
<th valign="top" align="center" colspan="2"><bold>RetinaNet</bold></th>
<th valign="top" align="center" colspan="2"><bold>Faster RCNN</bold></th>
</tr>
<tr style="background-color:&#x00023;919498;color:&#x00023;ffffff">
<th valign="top" align="left"><bold>EMS</bold></th>
<th valign="top" align="center"><bold>GTO</bold></th>
<th valign="top" align="center"><bold>tp</bold></th>
<th valign="top" align="center"><bold>fp</bold></th>
<th valign="top" align="center"><bold>tp</bold></th>
<th valign="top" align="center"><bold>fp</bold></th>
<th valign="top" align="center"><bold>tp</bold></th>
<th valign="top" align="center"><bold>fp</bold></th>
<th valign="top" align="center"><bold>tp</bold></th>
<th valign="top" align="center"><bold>fp</bold></th>
<th valign="top" align="center"><bold>tp</bold></th>
<th valign="top" align="center"><bold>fp</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left"><italic>Oscillatoria</italic></td>
<td valign="top" align="center">26</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">25</td>
</tr> <tr>
<td valign="top" align="left"><italic>Ankistrodesmus</italic></td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">2</td>
</tr> <tr>
<td valign="top" align="left"><italic>Microcystis</italic></td>
<td valign="top" align="center">151</td>
<td valign="top" align="center">105</td>
<td valign="top" align="center">95</td>
<td valign="top" align="center">87</td>
<td valign="top" align="center">37</td>
<td valign="top" align="center">83</td>
<td valign="top" align="center">35</td>
<td valign="top" align="center">94</td>
<td valign="top" align="center">31</td>
<td valign="top" align="center">118</td>
<td valign="top" align="center">112</td>
</tr> <tr>
<td valign="top" align="left"><italic>Gomphonema</italic></td>
<td valign="top" align="center">19</td>
<td valign="top" align="center">19</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">18</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">16</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">18</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">19</td>
<td valign="top" align="center">5</td>
</tr> <tr>
<td valign="top" align="left"><italic>Sphaerocystis</italic></td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">0</td>
</tr> <tr>
<td valign="top" align="left"><italic>Cosmarium</italic></td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">2</td>
</tr> <tr>
<td valign="top" align="left"><italic>Cocconeis</italic></td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
</tr> <tr>
<td valign="top" align="left"><italic>Tribonema</italic></td>
<td valign="top" align="center">17</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">5</td>
</tr> <tr>
<td valign="top" align="left"><italic>Chlorella</italic></td>
<td valign="top" align="center">34</td>
<td valign="top" align="center">18</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">15</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">16</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">15</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">20</td>
<td valign="top" align="center">24</td>
</tr> <tr>
<td valign="top" align="left"><italic>Tetraedron</italic></td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">3</td>
</tr> <tr>
<td valign="top" align="left"><italic>Ankistrodesmus</italic></td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">12</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">12</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">6</td>
</tr> <tr>
<td valign="top" align="left"><italic>Brachionus</italic></td>
<td valign="top" align="center">25</td>
<td valign="top" align="center">21</td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">17</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">25</td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">25</td>
<td valign="top" align="center">19</td>
</tr> <tr>
<td valign="top" align="left"><italic>Chaenea</italic></td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
</tr> <tr>
<td valign="top" align="left"><italic>Pediastrum</italic></td>
<td valign="top" align="center">24</td>
<td valign="top" align="center">23</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">20</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">23</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">20</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">23</td>
<td valign="top" align="center">1</td>
</tr> <tr>
<td valign="top" align="left"><italic>Spirulina</italic></td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">6</td>
</tr> <tr>
<td valign="top" align="left"><italic>Actinastrum</italic></td>
<td valign="top" align="center">40</td>
<td valign="top" align="center">23</td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">18</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">23</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">29</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">29</td>
<td valign="top" align="center">14</td>
</tr> <tr>
<td valign="top" align="left"><italic>Navicula</italic></td>
<td valign="top" align="center">16</td>
<td valign="top" align="center">12</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">12</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">15</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">7</td>
</tr> <tr>
<td valign="top" align="left"><italic>Scenedesmus</italic></td>
<td valign="top" align="center">25</td>
<td valign="top" align="center">19</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">19</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">20</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">18</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">19</td>
<td valign="top" align="center">7</td>
</tr> <tr>
<td valign="top" align="left"><italic>Golenkinia</italic></td>
<td valign="top" align="center">41</td>
<td valign="top" align="center">32</td>
<td valign="top" align="center">35</td>
<td valign="top" align="center">20</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">29</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">29</td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">31</td>
<td valign="top" align="center">25</td>
</tr> <tr>
<td valign="top" align="left"><italic>Pinnularia</italic></td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">3</td>
</tr> <tr>
<td valign="top" align="left"><italic>Staurastrum</italic></td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
</tr> <tr>
<td valign="top" align="left"><italic>Phormidium</italic></td>
<td valign="top" align="center">234</td>
<td valign="top" align="center">188</td>
<td valign="top" align="center">151</td>
<td valign="top" align="center">147</td>
<td valign="top" align="center">73</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">142</td>
<td valign="top" align="center">32</td>
<td valign="top" align="center">194</td>
<td valign="top" align="center">127</td>
</tr> <tr>
<td valign="top" align="left"><italic>Fragilaria</italic></td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">21</td>
<td valign="top" align="center">20</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">3</td>
</tr> <tr>
<td valign="top" align="left"><italic>Anabaenopsis</italic></td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">1</td>
</tr> <tr>
<td valign="top" align="left"><italic>Coelosphaerium</italic></td>
<td valign="top" align="center">42</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">21</td>
<td valign="top" align="center">12</td>
</tr> <tr>
<td valign="top" align="left"><italic>Crucigenia</italic></td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
</tr> <tr>
<td valign="top" align="left"><italic>Achnanthes</italic></td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">0</td>
</tr> <tr>
<td valign="top" align="left"><italic>Synedra</italic></td>
<td valign="top" align="center">34</td>
<td valign="top" align="center">23</td>
<td valign="top" align="center">29</td>
<td valign="top" align="center">16</td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">15</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">27</td>
<td valign="top" align="center">21</td>
</tr> <tr>
<td valign="top" align="left"><italic>Ceratium</italic></td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
</tr> <tr>
<td valign="top" align="left"><italic>Pompholyx</italic></td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">5</td>
</tr> <tr>
<td valign="top" align="left"><italic>Merismopedia</italic></td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">1</td>
</tr> <tr>
<td valign="top" align="left"><italic>Spirogyra</italic></td>
<td valign="top" align="center">25</td>
<td valign="top" align="center">19</td>
<td valign="top" align="center">16</td>
<td valign="top" align="center">12</td>
<td valign="top" align="center">16</td>
<td valign="top" align="center">20</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">21</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">21</td>
<td valign="top" align="center">9</td>
</tr> <tr>
<td valign="top" align="left"><italic>Coelastrum</italic></td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">0</td>
</tr> <tr>
<td valign="top" align="left"><italic>Raphidiopsis</italic></td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
</tr> <tr>
<td valign="top" align="left"><italic>Gomphosphaeria</italic></td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">8</td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">6</td>
</tr> <tr>
<td valign="top" align="left"><italic>Euglena</italic></td>
<td valign="top" align="center">16</td>
<td valign="top" align="center">15</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">15</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">12</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">16</td>
<td valign="top" align="center">1</td>
</tr> <tr>
<td valign="top" align="left"><italic>Euchlanis</italic></td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0</td>
</tr> <tr>
<td valign="top" align="left"><italic>Keratella</italic></td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">9</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">4</td>
</tr> <tr>
<td valign="top" align="left"><italic>diversicornis</italic></td>
<td valign="top" align="center">18</td>
<td valign="top" align="center">17</td>
<td valign="top" align="center">17</td>
<td valign="top" align="center">13</td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">14</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">19</td>
<td valign="top" align="center">11</td>
<td valign="top" align="center">17</td>
<td valign="top" align="center">4</td>
</tr> <tr>
<td valign="top" align="left"><italic>Surirella</italic></td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">3</td>
</tr> <tr>
<td valign="top" align="left"><italic>Characium</italic></td>
<td valign="top" align="center">5</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">3</td>
<td valign="top" align="center">1</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">4</td>
<td valign="top" align="center">2</td>
</tr> <tr>
<td valign="top" align="left"><italic>unknown</italic></td>
<td valign="top" align="center">1429</td>
<td valign="top" align="center">1068</td>
<td valign="top" align="center">1126</td>
<td valign="top" align="center">610</td>
<td valign="top" align="center">177</td>
<td valign="top" align="center">371</td>
<td valign="top" align="center">66</td>
<td valign="top" align="center">622</td>
<td valign="top" align="center">146</td>
<td valign="top" align="center">1049</td>
<td valign="top" align="center">1585</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>GTO, ground truth object; tp, true positive; fp, false positive (in [%]).</p>
</table-wrap-foot>
</table-wrap>
<fig id="F4" position="float">
<label>Figure 4</label>
<caption><p>Five object detection model prediction results in EMDS-7 (the microorganisms predicted by the five models are marked with five color boxes respectively. YOLOv3, white; YOLOv4, yellow; SSD, blue; RetinaNet, red; Faster RCNN, gray).</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fmicb-14-1084312-g0004.tif"/>
</fig>
<p>In <xref ref-type="fig" rid="F3">Figure 3</xref> and <xref ref-type="table" rid="T2">Table 2</xref> we can see that the EMDS-7 data set performs well in the task of object detection, and most of the EMs can be accurately identified. Meanwhile, different object detection models differ in the object detection effect of EMDS-7, which proves that EMDS-7 dataset provide performance analysis of different networks. In <xref ref-type="fig" rid="F3">Figure 3</xref> we calculate five models of the Mean Average Precision (MAP), which the Faster RCNN value of 76.05%, the YOLOv3 value of 58.61%, the YOLOv4 value of 40.30%, the SSD value of 57.83%, the RetinaNet value of 57.96%. We can see that Faster RCNN has the highest detection performance of EMDS-7. YOLOv3, SSD, RetinaNet model predicts that the MAP value is similar. The lowest is YOLOv4. Our EMDS-7 dataset is prepared for tiny object detection tasks, which is different for regular images. Although YOLOv4 is an improved version of YOLOv3, their structures have different performance for multi-scale small object inspection. YOLOv3 is also widely used in industry, and because v4 adds CSP and PAN structures to YOLOv3, YOLOv3 is less than YOLOv4 in terms of computational resources. However, some EMS category predictive AP values can reach 100%, while some kind of AP values are 0%. We find that there are little AP value is 100% or 0%, and the category of EMs in the image is basically consistent and the EMs characteristics are relatively large. In addition, different models are different from the method of extracting features, so differentiation occurs when model training. For example, in the object detection of Euglena category, the predictive sample has only two, and the AP value is 100% in the FASTER RCNN, but in the RetinaNet is 0%. We list the prediction results of the five models in <xref ref-type="table" rid="T2">Table 2</xref>. 
For example, the true sample size of the <italic>Microcystis</italic> prediction set was 151, of which the Faster RCNN model correctly identified 118, while 112 were incorrectly identified. The ability of Faster RCNN and YOLOv3 to correctly detect <italic>Microcystis</italic> is higher than that of the other three models. However, the number of <italic>Microcystis</italic> incorrectly detected by YOLOv4, SSD, and RetinaNet is smaller than that of Faster RCNN and YOLOv3. The Faster RCNN and YOLOv3 models we trained are more capable of learning to detect <italic>Microcystis</italic>, so that the FP and TP are both higher. Also, among the five models, Faster RCNN had the highest number of correct identifications for <italic>Microcystis</italic> species, and SSD had the lowest number of correct identifications at 83. <xref ref-type="table" rid="T2">Table 2</xref> describes the predictions of the five models for each category of EMs. In summary, the EMDS-7 database can provide analytical performance for different object detection models.</p>
</sec>
<sec sec-type="discussion" id="s5">
<title>5. Discussion</title>
<p><xref ref-type="table" rid="T3">Table 3</xref> shows the development history of the EMDS versions. Seven versions of the EMDS have been published, and different versions of the dataset have different features. Both EMDS-1 and EMDS-2 contain 10 classes of EMs with 20 original images and 20 GT images per class, which can be used for image classification and segmentation. No new features were added in EMDS-3; however, it was extended with five additional classes of EMs. Compared with EMDS-3, EMDS-4 has been extended with six new classes of EMs and adds a new image retrieval function. In EMDS-5, 420 single-object GT images and 420 multi-object GT images are prepared, respectively. Thus, EMDS-5 supports more functions. Based on EMDS-5, EMDS-6 adds 420 original images and 420 multi-object GT images. With the support of a larger data volume, EMDS-6 can realize more functions in a better and more stable way. EMDS-7 is specially designed as an object detection dataset with a more sufficient data volume than the previous versions, so that the EM object detection function can be realized in a better and more stable way. In addition, we have prepared label files corresponding to each image.</p>
<table-wrap position="float" id="T3">
<label>Table 3</label>
<caption><p>EMDS history versions and latest versions.</p></caption>
<table frame="box" rules="all">
<thead>
<tr style="background-color:&#x00023;919498;color:&#x00023;ffffff">
<th valign="top" align="left"><bold>Dataset</bold></th>
<th valign="top" align="center"><bold>ECN</bold></th>
<th valign="top" align="center"><bold>OIN</bold></th>
<th valign="top" align="left"><bold>GTIN</bold></th>
<th valign="top" align="left"><bold>Functions</bold></th>
<th valign="top" align="left"><bold>Dataset link</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">EMDS-1 (Li et al., <xref ref-type="bibr" rid="B17">2013</xref>)</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">200</td>
<td valign="top" align="left">200</td>
<td valign="top" align="left">IC, IS</td>
<td/>
</tr> <tr>
<td valign="top" align="left">EMDS-2 (Li et al., <xref ref-type="bibr" rid="B17">2013</xref>)</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">200</td>
<td valign="top" align="left">200</td>
<td valign="top" align="left">IC, IS</td>
<td/>
</tr> <tr>
<td valign="top" align="left">EMDS-3 (Li et al., <xref ref-type="bibr" rid="B16">2016</xref>)</td>
<td valign="top" align="center">15</td>
<td valign="top" align="center">300</td>
<td valign="top" align="left">200</td>
<td valign="top" align="left">IC, IS</td>
<td/>
</tr> <tr>
<td valign="top" align="left">EMDS-4 (Kosov et al., <xref ref-type="bibr" rid="B13">2018</xref>)</td>
<td valign="top" align="center">21</td>
<td valign="top" align="center">420</td>
<td valign="top" align="left">420</td>
<td valign="top" align="left">IC, IS, IR</td>
<td valign="top" align="left"><ext-link ext-link-type="uri" xlink:href="https://research.project-10.de/em-classiffication/">https://research.project-10.de/em-classiffication/</ext-link></td>
</tr> <tr>
<td valign="top" align="left">EMDS-5 (Li et al., <xref ref-type="bibr" rid="B20">2021</xref>)</td>
<td valign="top" align="center">21</td>
<td valign="top" align="center">420</td>
<td valign="top" align="left">840 (S,M 420)</td>
<td valign="top" align="left">ID, IED, SoIS, MoIS, SoFE, MoFE, IR</td>
<td valign="top" align="left"><ext-link ext-link-type="uri" xlink:href="https://github.com/NEUZihan/EMDS-5">https://github.com/NEUZihan/EMDS-5</ext-link></td>
</tr>
<tr>
<td valign="top" align="left">EMDS-6 (Zhao et al., <xref ref-type="bibr" rid="B51">2022b</xref>)</td>
<td valign="top" align="center">21</td>
<td valign="top" align="center">840</td>
<td valign="top" align="left">840</td>
<td valign="top" align="left">ID, IC, IS, IFE, IOD</td>
<td valign="top" align="left"><ext-link ext-link-type="uri" xlink:href="https://figshare.com/articles/dataset/EMDS6/17125025/1">https://figshare.com/articles/dataset/EMDS6/17125025/1</ext-link></td>
</tr> <tr>
<td valign="top" align="left">EMDS-7 [In this article]</td>
<td valign="top" align="center">42</td>
<td valign="top" align="center">2365</td>
<td valign="top" align="left">2365 (&#x02018;xml&#x00027;)</td>
<td valign="top" align="left">IOD</td>
<td valign="top" align="left"><ext-link ext-link-type="uri" xlink:href="https://figshare.com/articles/dataset/EMDS-7_DataSet/16869571">https://figshare.com/articles/dataset/EMDS-7_DataSet/16869571</ext-link></td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>IC, Image Classification; IS, Image Segmentation; SoIS, Single-object Image Segmentation; MoIS, Multi-object Image Segmentation; SoFE, Single-object Feature Extraction; MoFE, Multi-object Feature Extraction; IR, Image Retrieval; IFE, Image Feature Extraction; IOD, Image Object Detection; IED, Image Edge Detection; ID, Image denoising; ECN, EM Class Number; OIN, Original Image Number; GTIN, Ground Truth Image Number; S, Single Object; M, Multiple object.</p>
</table-wrap-foot>
</table-wrap>
</sec>
<sec id="s6">
<title>6. Conclusion and future work</title>
<p>EMDS-7 is an object detection dataset containing 42 types of EMs, which contains the original images of EMs and the corresponding object label data. EMDS-7 labels 15342 EMs. At the same time, we further add some deep learning object detection experiments on the EMDS-7 database to prove its effectiveness. During the object detection process, we divide the dataset in a 6:2:2 ratio into train, validation, and test sets. We use five different deep learning object detection methods to test EMDS-7 and use multiple evaluation indices to evaluate the prediction results. According to our experiments, EMDS-7 behaves differently in different deep learning models, so EMDS-7 can provide an analysis of the performance of different networks. Meanwhile, in this paper EMDS-7 has the highest accuracy on the Faster RCNN prediction test set, with a MAP of 76.05%, which achieves good performance in deep learning object detection.</p>
<p>In the future, we will enlarge the categories of EMs and increase the number of images of each EM, making the data of each class balanced and sufficient. We hope to use the EMDS-7 database to achieve more functions in the future.</p>
</sec>
<sec sec-type="data-availability" id="s7">
<title>Data availability statement</title>
<p>The datasets presented in this study can be found in online repositories. The names of the repository/repositories and accession number(s) can be found below: <ext-link ext-link-type="uri" xlink:href="https://figshare.com/articles/dataset/EMDS-7_DataSet/16869571">https://figshare.com/articles/dataset/EMDS-7_DataSet/16869571</ext-link>.</p>
</sec>
<sec sec-type="author-contributions" id="s8">
<title>Author contributions</title>
<p>HY: data, experiment, and writing. CL: corresponding author, team leader, method, data, experiment, and writing. XZ: corresponding author, data, and writing. BC, JZ, and PM: data. PZ: experiment. AC and HS: method. TJ, YT, and SQ: result analysis. XH and MG: proofreading. All authors contributed to the article and approved the submitted version.</p>
</sec>
</body>
<back>
<sec sec-type="funding-information" id="s9">
<title>Funding</title>
<p>This work was supported by the National Natural Science Foundation of China (No. 82220108007), Scientific Research Fund of Sichuan Provincial Science and Technology Department (No. 2021YFH0069), and Scientific Research Fund of Chengdu Science and Technology Bureau (Nos. 2022-YF05-01186-SN and 2022-YF05-01128-SN).</p>
</sec>
<ack><p>We thank Miss. Zixian Li and Mr. Guoxian Li for their important discussion.</p>
</ack>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="s10">
<title>Publisher&#x00027;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Abdulhay</surname> <given-names>E.</given-names></name> <name><surname>Mohammed</surname> <given-names>M. A.</given-names></name> <name><surname>Ibrahim</surname> <given-names>D. A.</given-names></name> <name><surname>Arunkumar</surname> <given-names>N.</given-names></name> <name><surname>Venkatraman</surname> <given-names>V.</given-names></name></person-group> (<year>2018</year>). <article-title>Computer aided solution for automatic segmenting and measurements of blood leucocytes using static microscope images</article-title>. <source>J. Med. Syst</source>. <volume>42</volume>, <fpage>1</fpage>&#x02013;<lpage>12</lpage>. <pub-id pub-id-type="doi">10.1007/s10916-018-0912-y</pub-id><pub-id pub-id-type="pmid">29455440</pub-id></citation></ref>
<ref id="B2">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Anand</surname> <given-names>U.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Sunita</surname> <given-names>K.</given-names></name> <name><surname>Lokhandwala</surname> <given-names>S.</given-names></name> <name><surname>Gautam</surname> <given-names>P.</given-names></name> <name><surname>Suresh</surname> <given-names>S.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>SARS-CoV-2 and other pathogens in municipal wastewater, landfill leachate, and solid waste: a review about virus surveillance, infectivity, and inactivation</article-title>. <source>Environ. Res</source>. 2021, 111839. <pub-id pub-id-type="doi">10.1016/j.envres.2021.111839</pub-id><pub-id pub-id-type="pmid">34358502</pub-id></citation></ref>
<ref id="B3">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Bochkovskiy</surname> <given-names>A.</given-names></name> <name><surname>Wang</surname> <given-names>C.-Y.</given-names></name> <name><surname>Liao</surname> <given-names>H.-Y. M.</given-names></name></person-group> (<year>2020</year>). <article-title>Yolov4: optimal speed and accuracy of object detection</article-title>. <source>arXiv preprint arXiv:2004.10934</source>. <pub-id pub-id-type="doi">10.48550/arXiv.2004.10934</pub-id></citation>
</ref>
<ref id="B4">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>A.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Zou</surname> <given-names>S.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Yao</surname> <given-names>Y.</given-names></name> <name><surname>Chen</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>Svia dataset: a new dataset of microscopic videos and images for computer-aided sperm analysis</article-title>. <source>Biocybern. Biomed. Eng</source>. <volume>42</volume>, <fpage>204</fpage>&#x02013;<lpage>214</lpage>. <pub-id pub-id-type="doi">10.1016/j.bbe.2021.12.010</pub-id></citation>
</ref>
<ref id="B5">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>H.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Hu</surname> <given-names>W.</given-names></name> <name><surname>Li</surname> <given-names>Y.</given-names></name> <etal/></person-group>. (<year>2022a</year>). <article-title>Il-mcam: an interactive learning and multi-channel attention mechanism-based weakly supervised colorectal histopathology image classification approach</article-title>. <source>Comput. Biol. Med</source>. 143, 105265. <pub-id pub-id-type="doi">10.1016/j.compbiomed.2022.105265</pub-id><pub-id pub-id-type="pmid">35123138</pub-id></citation></ref>
<ref id="B6">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>H.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Wang</surname> <given-names>G.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Sun</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2022b</year>). <article-title>Gashis-transformer: a multi-scale visual transformer approach for gastric histopathological image detection</article-title>. <source>Pattern Recognit</source>. 130, 108827. <pub-id pub-id-type="doi">10.1016/j.patcog.2022.108827</pub-id></citation>
</ref>
<ref id="B7">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>J.</given-names></name> <name><surname>Sun</surname> <given-names>S.</given-names></name> <name><surname>Zhang</surname> <given-names>L.-B.</given-names></name> <name><surname>Yang</surname> <given-names>B.</given-names></name> <name><surname>Wang</surname> <given-names>W.</given-names></name></person-group> (<year>2021</year>). <article-title>Compressed sensing framework for heart sound acquisition in internet of medical things</article-title>. <source>IEEE Trans. Ind. Inform</source>. <volume>18</volume>, <fpage>2000</fpage>&#x02013;<lpage>2009</lpage>. <pub-id pub-id-type="doi">10.1109/TII.2021.3088465</pub-id></citation>
</ref>
<ref id="B8">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Gonzalez</surname> <given-names>R. C.</given-names></name> <name><surname>Woods</surname> <given-names>R. E.</given-names></name></person-group> (<year>2002</year>). <source>Digital Image Processing, 2nd Edn</source>. <publisher-loc>Beijing</publisher-loc>: <publisher-name>Publishing House of Electronics Industry</publisher-name>.<pub-id pub-id-type="pmid">19926915</pub-id></citation></ref>
<ref id="B9">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hu</surname> <given-names>W.</given-names></name> <name><surname>Chen</surname> <given-names>H.</given-names></name> <name><surname>Liu</surname> <given-names>W.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Sun</surname> <given-names>H.</given-names></name> <name><surname>Huang</surname> <given-names>X.</given-names></name> <etal/></person-group>. (<year>2022a</year>). <article-title>A comparative study of gastric histopathology sub-size image classification: from linear regression to visual transformer</article-title>. <source>arXiv preprint arXiv:2205.12843</source>. <pub-id pub-id-type="doi">10.3389/fmed.2022.1072109</pub-id><pub-id pub-id-type="pmid">36569152</pub-id></citation></ref>
<ref id="B10">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hu</surname> <given-names>W.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Ma</surname> <given-names>J.</given-names></name> <name><surname>Zhang</surname> <given-names>Y.</given-names></name> <etal/></person-group>. (<year>2022b</year>). <article-title>Gashissdb: a new gastric histopathology image dataset for computer aided diagnosis of gastric cancer</article-title>. <source>Comput. Biol. Med</source>. 142, 105207. <pub-id pub-id-type="doi">10.1016/j.compbiomed.2021.105207</pub-id><pub-id pub-id-type="pmid">35016101</pub-id></citation></ref>
<ref id="B11">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ji</surname> <given-names>F.</given-names></name> <name><surname>Yan</surname> <given-names>L.</given-names></name> <name><surname>Yan</surname> <given-names>S.</given-names></name> <name><surname>Qin</surname> <given-names>T.</given-names></name> <name><surname>Shen</surname> <given-names>J.</given-names></name> <name><surname>Zha</surname> <given-names>J.</given-names></name></person-group> (<year>2021</year>). <article-title>Estimating aquatic plant diversity and distribution in rivers from jingjinji region, china, using environmental dna metabarcoding and a traditional survey method</article-title>. <source>Environ. Res</source>. 199, 111348. <pub-id pub-id-type="doi">10.1016/j.envres.2021.111348</pub-id><pub-id pub-id-type="pmid">34029550</pub-id></citation></ref>
<ref id="B12">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kashyap</surname> <given-names>M.</given-names></name> <name><surname>Samadhiya</surname> <given-names>K.</given-names></name> <name><surname>Ghosh</surname> <given-names>A.</given-names></name> <name><surname>Anand</surname> <given-names>V.</given-names></name> <name><surname>Lee</surname> <given-names>H.</given-names></name> <name><surname>Sawamoto</surname> <given-names>N.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Synthesis, characterization and application of intracellular ag/agcl nanohybrids biosynthesized in scenedesmus sp. as neutral lipid inducer and antibacterial agent</article-title>. <source>Environ. Res</source>. 201, 111499. <pub-id pub-id-type="doi">10.1016/j.envres.2021.111499</pub-id><pub-id pub-id-type="pmid">34146525</pub-id></citation></ref>
<ref id="B13">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kosov</surname> <given-names>S.</given-names></name> <name><surname>Shirahama</surname> <given-names>K.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Grzegorzek</surname> <given-names>M.</given-names></name></person-group> (<year>2018</year>). <article-title>Environmental microorganism classification using conditional random fields and deep convolutional neural networks</article-title>. <source>Pattern Recognit</source>. <volume>77</volume>, <fpage>248</fpage>&#x02013;<lpage>261</lpage>. <pub-id pub-id-type="doi">10.1016/j.patcog.2017.12.021</pub-id></citation>
</ref>
<ref id="B14">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kulwa</surname> <given-names>F.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Grzegorzek</surname> <given-names>M.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Shirahama</surname> <given-names>K.</given-names></name> <name><surname>Kosov</surname> <given-names>S.</given-names></name></person-group> (<year>2023</year>). <article-title>Segmentation of weakly visible environmental microorganism images using pair-wise deep learning features</article-title>. <source>Biomed. Signal Process. Control</source>. 79, 104168. <pub-id pub-id-type="doi">10.1016/j.bspc.2022.104168</pub-id></citation>
</ref>
<ref id="B15">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Kulwa</surname> <given-names>F.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Shirahama</surname> <given-names>K.</given-names></name> <name><surname>Kosov</surname> <given-names>S.</given-names></name> <name><surname>Zhao</surname> <given-names>X.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>A new pairwise deep learning feature for environmental microorganism image analysis</article-title>. <source>Environ. Sci. Pollut. Res</source>. <volume>29</volume>, <fpage>51909</fpage>&#x02013;<lpage>51926</lpage>. <pub-id pub-id-type="doi">10.1007/s11356-022-18849-0</pub-id><pub-id pub-id-type="pmid">35257344</pub-id></citation></ref>
<ref id="B16">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Shirahama</surname> <given-names>K.</given-names></name> <name><surname>Grzegorzek</surname> <given-names>M.</given-names></name></person-group> (<year>2016</year>). <article-title>Environmental microbiology aided by content-based image analysis</article-title>. <source>Pattern Anal. Appl</source>. <volume>19</volume>, <fpage>531</fpage>&#x02013;<lpage>547</lpage>. <pub-id pub-id-type="doi">10.1007/s10044-015-0498-7</pub-id></citation>
</ref>
<ref id="B17">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Shirahama</surname> <given-names>K.</given-names></name> <name><surname>Grzegorzek</surname> <given-names>M.</given-names></name> <name><surname>Ma</surname> <given-names>F.</given-names></name> <name><surname>Zhou</surname> <given-names>B.</given-names></name></person-group> (<year>2013</year>). <article-title>&#x0201C;Classification of environmental microorganisms in microscopic images using shape features and support vector machines,&#x0201D;</article-title> in <source>2013 IEEE International Conference on Image Processing</source> (<publisher-loc>Melbourne, VIC</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>2435</fpage>&#x02013;<lpage>2439</lpage>.</citation>
</ref>
<ref id="B18">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Wang</surname> <given-names>K.</given-names></name> <name><surname>Xu</surname> <given-names>N.</given-names></name></person-group> (<year>2019</year>). <article-title>A survey for the applications of content-based microscopic image analysis in microorganism classification domains</article-title>. <source>Artif. Intell. Rev</source>. <volume>51</volume>, <fpage>577</fpage>&#x02013;<lpage>646</lpage>. <pub-id pub-id-type="doi">10.1007/s10462-017-9572-4</pub-id></citation>
</ref>
<ref id="B19">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Sun</surname> <given-names>H.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Wu</surname> <given-names>J.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>A comprehensive review of computer-aided whole-slide image analysis: from datasets to feature extraction, segmentation, classification and detection approaches</article-title>. <source>Artif. Intell. Rev</source>. <volume>55</volume>, <fpage>4809</fpage>&#x02013;<lpage>4878</lpage>. <pub-id pub-id-type="doi">10.1007/s10462-021-10121-0</pub-id></citation>
</ref>
<ref id="B20">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>Z.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Yao</surname> <given-names>Y.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Xu</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Emds-5: environmental microorganism image dataset fifth version for multiple image analysis tasks</article-title>. <source>PLoS ONE</source> <volume>16</volume>, <fpage>e0250631</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0250631</pub-id><pub-id pub-id-type="pmid">33979356</pub-id></citation></ref>
<ref id="B21">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Lin</surname> <given-names>T.-Y.</given-names></name> <name><surname>Doll&#x000E1;r</surname> <given-names>P.</given-names></name> <name><surname>Girshick</surname> <given-names>R.</given-names></name> <name><surname>He</surname> <given-names>K.</given-names></name> <name><surname>Hariharan</surname> <given-names>B.</given-names></name> <name><surname>Belongie</surname> <given-names>S.</given-names></name></person-group> (<year>2017a</year>). <article-title>&#x0201C;Feature pyramid networks for object detection,&#x0201D;</article-title> in <source>Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition</source> (<publisher-loc>Honolulu, HI</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>2117</fpage>&#x02013;<lpage>2125</lpage>.</citation>
</ref>
<ref id="B22">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Lin</surname> <given-names>T.-Y.</given-names></name> <name><surname>Goyal</surname> <given-names>P.</given-names></name> <name><surname>Girshick</surname> <given-names>R.</given-names></name> <name><surname>He</surname> <given-names>K.</given-names></name> <name><surname>Doll&#x000E1;r</surname> <given-names>P.</given-names></name></person-group> (<year>2017b</year>). <article-title>&#x0201C;Focal loss for dense object detection,&#x0201D;</article-title> in <source>Proceedings of the IEEE International Conference on Computer Vision</source>, (<publisher-loc>Venice</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>2980</fpage>&#x02013;<lpage>2988</lpage>.<pub-id pub-id-type="pmid">35679384</pub-id></citation></ref>
<ref id="B23">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>W.</given-names></name> <name><surname>Anguelov</surname> <given-names>D.</given-names></name> <name><surname>Erhan</surname> <given-names>D.</given-names></name> <name><surname>Szegedy</surname> <given-names>C.</given-names></name> <name><surname>Reed</surname> <given-names>S.</given-names></name> <name><surname>Fu</surname> <given-names>C.-Y.</given-names></name> <etal/></person-group>. (<year>2016</year>). <article-title>&#x0201C;SSD: Single shot multibox detector,&#x0201D;</article-title> in <source>European Conference on Computer Vision</source> (<publisher-loc>Cham</publisher-loc>: <publisher-name>Springer</publisher-name>), <fpage>21</fpage>&#x02013;<lpage>37</lpage>.</citation>
</ref>
<ref id="B24">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>W.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Jiang</surname> <given-names>T.</given-names></name> <name><surname>Sun</surname> <given-names>H.</given-names></name> <name><surname>Wu</surname> <given-names>X.</given-names></name> <etal/></person-group>. (<year>2022a</year>). <article-title>Is the aspect ratio of cells important in deep learning? a robust comparison of deep learning methods for multi-scale cytopathology cell image classification: from convolutional neural networks to visual transformers</article-title>. <source>Comput. Biol. Med</source>. 141, 105026. <pub-id pub-id-type="doi">10.1016/j.compbiomed.2021.105026</pub-id><pub-id pub-id-type="pmid">34801245</pub-id></citation></ref>
<ref id="B25">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>W.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Xu</surname> <given-names>N.</given-names></name> <name><surname>Jiang</surname> <given-names>T.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Sun</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2022b</year>). <article-title>Cvm-cervix: a hybrid cervical pap-smear image classification framework using cnn, visual transformer and multilayer perceptron</article-title>. <source>Pattern Recogn</source>. 2022, 108829. <pub-id pub-id-type="doi">10.1016/j.patcog.2022.108829</pub-id></citation>
</ref>
<ref id="B26">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>X.</given-names></name> <name><surname>Fu</surname> <given-names>L.</given-names></name> <name><surname>Chun-Wei Lin</surname> <given-names>J.</given-names></name> <name><surname>Liu</surname> <given-names>S.</given-names></name></person-group> (<year>2022c</year>). <article-title>Sras-net: low-resolution chromosome image classification based on deep learning</article-title>. <source>IET Syst. Biol</source>. 2022, 12042. <pub-id pub-id-type="doi">10.1049/syb2.12042</pub-id><pub-id pub-id-type="pmid">35373918</pub-id></citation></ref>
<ref id="B27">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>X.</given-names></name> <name><surname>Wang</surname> <given-names>S.</given-names></name> <name><surname>Lin</surname> <given-names>J. C.-W.</given-names></name> <name><surname>Liu</surname> <given-names>S.</given-names></name></person-group> (<year>2022d</year>). <article-title>An algorithm for overlapping chromosome segmentation based on region selection</article-title>. <source>Neural Comput. Appl</source>. 1&#x02013;10. <pub-id pub-id-type="doi">10.1007/s00521-022-07317-y</pub-id></citation>
</ref>
<ref id="B28">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Lu</surname> <given-names>L.</given-names></name> <name><surname>Niu</surname> <given-names>X.</given-names></name> <name><surname>Zhang</surname> <given-names>D.</given-names></name> <name><surname>Ma</surname> <given-names>J.</given-names></name> <name><surname>Zheng</surname> <given-names>X.</given-names></name> <name><surname>Xiao</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>The algicidal efficacy and the mechanism of enterobacter sp. ea-1 on oscillatoria dominating in aquaculture system</article-title>. <source>Environ. Res</source>. 197, 111105. <pub-id pub-id-type="doi">10.1016/j.envres.2021.111105</pub-id><pub-id pub-id-type="pmid">33839120</pub-id></citation></ref>
<ref id="B29">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ma</surname> <given-names>P.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Yao</surname> <given-names>Y.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Zou</surname> <given-names>S.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>A state-of-the-art survey of object detection techniques in microorganism image analysis: from classical methods to deep learning approaches</article-title>. <source>Artif. Intell. Rev</source>. <volume>56</volume>, <fpage>1627</fpage>&#x02013;<lpage>1698</lpage>. <pub-id pub-id-type="doi">10.1007/s10462-022-10209-1</pub-id><pub-id pub-id-type="pmid">35693000</pub-id></citation></ref>
<ref id="B30">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pal</surname> <given-names>N. R.</given-names></name> <name><surname>Pal</surname> <given-names>S. K.</given-names></name></person-group> (<year>1993</year>). <article-title>A review on image segmentation techniques</article-title>. <source>Pattern Recognit</source>. <volume>26</volume>, <fpage>1277</fpage>&#x02013;<lpage>1294</lpage>. <pub-id pub-id-type="doi">10.1016/0031-3203(93)90135-J</pub-id></citation>
</ref>
<ref id="B31">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Wu</surname> <given-names>X.</given-names></name> <name><surname>Yao</surname> <given-names>Y.</given-names></name> <name><surname>Hu</surname> <given-names>Z.</given-names></name> <name><surname>Jiang</surname> <given-names>T.</given-names></name> <etal/></person-group>. (<year>2020a</year>). <article-title>A survey for cervical cytopathology image analysis using deep learning</article-title>. <source>IEEE Access</source> <volume>8</volume>, <fpage>61687</fpage>&#x02013;<lpage>61710</lpage>. <pub-id pub-id-type="doi">10.1109/ACCESS.2020.2983186</pub-id><pub-id pub-id-type="pmid">32814641</pub-id></citation></ref>
<ref id="B32">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Yao</surname> <given-names>Y.</given-names></name> <name><surname>Kulwa</surname> <given-names>F.</given-names></name> <name><surname>Rahman</surname> <given-names>M. A.</given-names></name> <name><surname>Wang</surname> <given-names>Q.</given-names></name> <etal/></person-group>. (<year>2020b</year>). <article-title>Identification of COVID-19 samples from chest x-ray images using deep learning: a comparison of transfer learning approaches</article-title>. <source>J. Xray Sci. Technol</source>. <volume>28</volume>, <fpage>821</fpage>&#x02013;<lpage>839</lpage>. <pub-id pub-id-type="doi">10.3233/XST-200715</pub-id><pub-id pub-id-type="pmid">32773400</pub-id></citation></ref>
<ref id="B33">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Yao</surname> <given-names>Y.</given-names></name> <name><surname>Kulwa</surname> <given-names>F.</given-names></name> <name><surname>Wu</surname> <given-names>X.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Deepcervix: a deep learning-based framework for the classification of cervical cells using hybrid deep feature fusion techniques</article-title>. <source>Comput. Biol. Med</source>. 136, 104649. <pub-id pub-id-type="doi">10.1016/j.compbiomed.2021.104649</pub-id><pub-id pub-id-type="pmid">34332347</pub-id></citation></ref>
<ref id="B34">
<citation citation-type="book"><person-group person-group-type="author"><name><surname>Redmon</surname> <given-names>J.</given-names></name> <name><surname>Divvala</surname> <given-names>S.</given-names></name> <name><surname>Girshick</surname> <given-names>R.</given-names></name> <name><surname>Farhadi</surname> <given-names>A.</given-names></name></person-group> (<year>2016</year>). <article-title>&#x0201C;You only look once: Unified, real-time object detection,&#x0201D;</article-title> in <source>Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition</source> (<publisher-loc>Las Vegas, NV</publisher-loc>: <publisher-name>IEEE</publisher-name>), <fpage>779</fpage>&#x02013;<lpage>788</lpage>.</citation>
</ref>
<ref id="B35">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ren</surname> <given-names>S.</given-names></name> <name><surname>He</surname> <given-names>K.</given-names></name> <name><surname>Girshick</surname> <given-names>R.</given-names></name> <name><surname>Sun</surname> <given-names>J.</given-names></name></person-group> (<year>2015</year>). <article-title>Faster R-CNN: towards real-time object detection with region proposal networks</article-title>. <source>Adv. Neural Inf. Process. Syst</source>. <volume>28</volume>, <fpage>91</fpage>&#x02013;<lpage>99</lpage>. <pub-id pub-id-type="doi">10.48550/arXiv.1506.01497</pub-id><pub-id pub-id-type="pmid">27295650</pub-id></citation></ref>
<ref id="B36">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Rodriguez</surname> <given-names>A.</given-names></name> <name><surname>Sesena</surname> <given-names>S.</given-names></name> <name><surname>Sanchez</surname> <given-names>E.</given-names></name> <name><surname>Rodriguez</surname> <given-names>M.</given-names></name> <name><surname>Palop</surname> <given-names>M. L.</given-names></name> <name><surname>Mart&#x000ED;n-Doimeadios</surname> <given-names>R.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>Temporal variability measurements of pm2.5 and its associated metals and microorganisms on a suburban atmosphere in the central iberian peninsula</article-title>. <source>Environ. Res</source>. 191, 110220. <pub-id pub-id-type="doi">10.1016/j.envres.2020.110220</pub-id><pub-id pub-id-type="pmid">32946891</pub-id></citation></ref>
<ref id="B37">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Shen</surname> <given-names>D.</given-names></name> <name><surname>Wu</surname> <given-names>G.</given-names></name> <name><surname>Suk</surname> <given-names>H.-I.</given-names></name></person-group> (<year>2017</year>). <article-title>Deep learning in medical image analysis</article-title>. <source>Annu. Rev. Biomed. Eng</source>. <volume>19</volume>, <fpage>221</fpage>&#x02013;<lpage>248</lpage>. <pub-id pub-id-type="doi">10.1146/annurev-bioeng-071516-044442</pub-id><pub-id pub-id-type="pmid">28301734</pub-id></citation></ref>
<ref id="B38">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Song</surname> <given-names>L.</given-names></name> <name><surname>Liu</surname> <given-names>X.</given-names></name> <name><surname>Chen</surname> <given-names>S.</given-names></name> <name><surname>Liu</surname> <given-names>S.</given-names></name> <name><surname>Liu</surname> <given-names>X.</given-names></name> <name><surname>Muhammad</surname> <given-names>K.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>A deep fuzzy model for diagnosis of COVID-19 from ct images</article-title>. <source>Appl. Soft. Comput</source>. 122, 108883. <pub-id pub-id-type="doi">10.1016/j.asoc.2022.108883</pub-id><pub-id pub-id-type="pmid">35474916</pub-id></citation></ref>
<ref id="B39">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sun</surname> <given-names>C.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Ai</surname> <given-names>S.</given-names></name> <name><surname>Chen</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>Gastric histopathology image segmentation using a hierarchical conditional random field</article-title>. <source>Biocybern. Biomed. Eng</source>. <volume>40</volume>, <fpage>1535</fpage>&#x02013;<lpage>1555</lpage>. <pub-id pub-id-type="doi">10.1016/j.bbe.2020.09.008</pub-id></citation>
</ref>
<ref id="B40">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>W.</given-names></name> <name><surname>Yu</surname> <given-names>X.</given-names></name> <name><surname>Fang</surname> <given-names>B.</given-names></name> <name><surname>Zhao</surname> <given-names>D.-Y.</given-names></name> <name><surname>Chen</surname> <given-names>Y.</given-names></name> <name><surname>Wei</surname> <given-names>W.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>Cross-modality lge-cmr segmentation using image-to-image translation based data augmentation</article-title>. <source>IEEE/ACM Trans. Comput. Biol. Bioinform</source>. 2022, 3140306. <pub-id pub-id-type="doi">10.1109/TCBB.2022.3140306</pub-id><pub-id pub-id-type="pmid">34982688</pub-id></citation></ref>
<ref id="B41">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yamaguchi</surname> <given-names>T.</given-names></name> <name><surname>Kawakami</surname> <given-names>S.</given-names></name> <name><surname>Hatamoto</surname> <given-names>M.</given-names></name> <name><surname>Imachi</surname> <given-names>H.</given-names></name> <name><surname>Takahashi</surname> <given-names>M.</given-names></name> <name><surname>Araki</surname> <given-names>N.</given-names></name> <etal/></person-group>. (<year>2015</year>). <article-title>In situ dna-hybridization chain reaction (hcr): a facilitated in situ hcr system for the detection of environmental microorganisms</article-title>. <source>Environ. Microbiol</source>. <volume>17</volume>, <fpage>2532</fpage>&#x02013;<lpage>2541</lpage>. <pub-id pub-id-type="doi">10.1111/1462-2920.12745</pub-id><pub-id pub-id-type="pmid">25523128</pub-id></citation></ref>
<ref id="B42">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Yang</surname> <given-names>H.</given-names></name> <name><surname>Zhao</surname> <given-names>X.</given-names></name> <name><surname>Jiang</surname> <given-names>T.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Zhao</surname> <given-names>P.</given-names></name> <name><surname>Chen</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>Comparative study for patch-level and pixel-level segmentation of deep learning methods on transparent images of environmental microorganisms: from convolutional neural networks to visual transformers</article-title>. <source>Appl. Sci</source>. 12, 9321. <pub-id pub-id-type="doi">10.3390/app12189321</pub-id></citation>
</ref>
<ref id="B43">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Kosov</surname> <given-names>S.</given-names></name> <name><surname>Grzegorzek</surname> <given-names>M.</given-names></name> <name><surname>Shirahama</surname> <given-names>K.</given-names></name> <name><surname>Jiang</surname> <given-names>T.</given-names></name> <etal/></person-group>. (<year>2021a</year>). <article-title>Lcu-net: A novel low-cost u-net for environmental microorganism image segmentation</article-title>. <source>Pattern Recognit</source>. 115, 107885. <pub-id pub-id-type="doi">10.1016/j.patcog.2021.107885</pub-id></citation>
</ref>
<ref id="B44">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Kulwa</surname> <given-names>F.</given-names></name> <name><surname>Zhao</surname> <given-names>X.</given-names></name> <name><surname>Sun</surname> <given-names>C.</given-names></name> <name><surname>Li</surname> <given-names>Z.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>A multiscale cnn-crf framework for environmental microorganism image segmentation</article-title>. <source>Biomed. Res. Int</source>. 2020, 4621403. <pub-id pub-id-type="doi">10.1155/2020/4621403</pub-id><pub-id pub-id-type="pmid">32724802</pub-id></citation></ref>
<ref id="B45">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Yao</surname> <given-names>Y.</given-names></name> <name><surname>Ma</surname> <given-names>P.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <etal/></person-group>. (<year>2021b</year>). <article-title>A comprehensive review of image analysis methods for microorganism counting: from classical image processing to deep learning approaches</article-title>. <source>Artif. Intell. Rev</source>. <volume>55</volume>, <fpage>2875</fpage>&#x02013;<lpage>2944</lpage>. <pub-id pub-id-type="doi">10.1007/s10462-021-10082-4</pub-id><pub-id pub-id-type="pmid">34602697</pub-id></citation></ref>
<ref id="B46">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Yao</surname> <given-names>Y.</given-names></name> <name><surname>Ma</surname> <given-names>P.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <etal/></person-group>. (<year>2022a</year>). <article-title>A comprehensive survey with quantitative comparison of image analysis methods for microorganism biovolume measurements</article-title>. <source>arXiv preprint arXiv:2202.09020</source>. <pub-id pub-id-type="doi">10.1007/s11831-022-09811-x</pub-id><pub-id pub-id-type="pmid">36091717</pub-id></citation></ref>
<ref id="B47">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Yin</surname> <given-names>Y.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Grzegorzek</surname> <given-names>M.</given-names></name></person-group> (<year>2022b</year>). <article-title>Applications of artificial neural networks in microorganism image analysis: a comprehensive review from conventional multilayer perceptron to popular convolutional neural network and potential visual transformer</article-title>. <source>Artif. Intell. Rev</source>. <volume>56</volume>, <fpage>1013</fpage>&#x02013;<lpage>1070</lpage>. <pub-id pub-id-type="doi">10.1007/s10462-022-10192-7</pub-id><pub-id pub-id-type="pmid">35528112</pub-id></citation></ref>
<ref id="B48">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Ma</surname> <given-names>P.</given-names></name> <name><surname>Jiang</surname> <given-names>T.</given-names></name> <name><surname>Zhao</surname> <given-names>X.</given-names></name> <name><surname>Tan</surname> <given-names>W.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <etal/></person-group>. (<year>2022c</year>). <article-title>Sem-rcnn: a squeeze-and-excitation-based mask region convolutional neural network for multi-class environmental microorganism detection</article-title>. <source>Appl. Sci</source>. 12, 9902. <pub-id pub-id-type="doi">10.3390/app12199902</pub-id></citation>
</ref>
<ref id="B49">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Zhao</surname> <given-names>X.</given-names></name> <name><surname>Jiang</surname> <given-names>T.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Yao</surname> <given-names>Y.</given-names></name> <name><surname>Lin</surname> <given-names>Y.-H.</given-names></name> <etal/></person-group>. (<year>2022d</year>). <article-title>An application of pixel interval down-sampling (pid) for dense tiny microorganism counting on environmental microorganism images</article-title>. <source>Appl. Sci</source>. 12, 7314. <pub-id pub-id-type="doi">10.3390/app12147314</pub-id></citation>
</ref>
<ref id="B50">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhao</surname> <given-names>P.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Rahaman</surname> <given-names>M.</given-names></name> <name><surname>Xu</surname> <given-names>H.</given-names></name> <name><surname>Yang</surname> <given-names>H.</given-names></name> <name><surname>Sun</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2022a</year>). <article-title>A comparative study of deep learning classification methods on a small environmental microorganism image dataset (emds-6): from convolutional neural networks to visual transformers</article-title>. <source>Front. Microbiol</source>. 13, 792166. <pub-id pub-id-type="doi">10.3389/fmicb.2022.792166</pub-id><pub-id pub-id-type="pmid">35308350</pub-id></citation></ref>
<ref id="B51">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhao</surname> <given-names>P.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Rahaman</surname> <given-names>M. M.</given-names></name> <name><surname>Xu</surname> <given-names>H.</given-names></name> <name><surname>Ma</surname> <given-names>P.</given-names></name> <name><surname>Yang</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2022b</year>). <article-title>Emds-6: Environmental microorganism image dataset sixth version for image denoising, segmentation, feature extraction, classification, and detection method evaluation</article-title>. <source>Front. Microbiol</source>. 13, 829027. <pub-id pub-id-type="doi">10.3389/fmicb.2022.829027</pub-id><pub-id pub-id-type="pmid">35547119</pub-id></citation></ref>
<ref id="B52">
<citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zou</surname> <given-names>S.</given-names></name> <name><surname>Li</surname> <given-names>C.</given-names></name> <name><surname>Sun</surname> <given-names>H.</given-names></name> <name><surname>Xu</surname> <given-names>P.</given-names></name> <name><surname>Zhang</surname> <given-names>J.</given-names></name> <name><surname>Ma</surname> <given-names>P.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>TOD-CNN: an effective convolutional neural network for tiny object detection in sperm videos</article-title>. <source>Comput. Biol. Med</source>. 146, 105543. <pub-id pub-id-type="doi">10.1016/j.compbiomed.2022.105543</pub-id><pub-id pub-id-type="pmid">35483229</pub-id></citation></ref>
</ref-list> 
</back>
</article> 