<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="editorial" dtd-version="2.3" xml:lang="en">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Mar. Sci.</journal-id>
<journal-title>Frontiers in Marine Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Mar. Sci.</abbrev-journal-title>
<issn pub-type="epub">2296-7745</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fmars.2023.1256183</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Marine Science</subject>
<subj-group>
<subject>Editorial</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Editorial: Optics and machine vision for marine observation</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Song</surname>
<given-names>Hong</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="author-notes" rid="fn001">
<sup>*</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1712488"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Mehdi</surname>
<given-names>Syed Raza</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1897540"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Wang</surname>
<given-names>Mengjie</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1984403"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Liao</surname>
<given-names>Ran</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1401491"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Naqvi</surname>
<given-names>Rizwan Ali</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1710723"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Xie</surname>
<given-names>Surui</given-names>
</name>
<xref ref-type="aff" rid="aff4">
<sup>4</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1734097"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>Department of Ocean Engineering, Ocean College, Zhejiang University</institution>, <addr-line>Zhoushan, Zhejiang</addr-line>, <country>China</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Shenzhen International Graduate School, Tsinghua University</institution>, <addr-line>Shenzhen</addr-line>, <country>China</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>Department of Intelligent Mechatronics Engineering, Sejong University</institution>, <addr-line>Seoul</addr-line>, <country>Republic of Korea</country>
</aff>
<aff id="aff4">
<sup>4</sup>
<institution>Department of Civil and Environmental Engineering, University of Houston</institution>, <addr-line>Houston, TX</addr-line>, <country>United States</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>Edited and Reviewed by: Oliver Zielinski, Leibniz Institute for Baltic Sea Research (IOW), Germany</p>
</fn>
<fn fn-type="corresp" id="fn001">
<p>*Correspondence: Hong Song, <email xlink:href="mailto:hongsong@zju.edu.cn">hongsong@zju.edu.cn</email>
</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>14</day>
<month>08</month>
<year>2023</year>
</pub-date>
<pub-date pub-type="collection">
<year>2023</year>
</pub-date>
<volume>10</volume>
<elocation-id>1256183</elocation-id>
<history>
<date date-type="received">
<day>10</day>
<month>07</month>
<year>2023</year>
</date>
<date date-type="accepted">
<day>02</day>
<month>08</month>
<year>2023</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2023 Song, Mehdi, Wang, Liao, Naqvi and Xie</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Song, Mehdi, Wang, Liao, Naqvi and Xie</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<related-article id="RA1" related-article-type="commentary-article" xlink:href="https://www.frontiersin.org/research-topics/35260" ext-link-type="uri">Editorial on the Research Topic <article-title>Optics and machine vision for marine observation</article-title>
</related-article>
<kwd-group>
<kwd>underwater optics</kwd>
<kwd>underwater imaging</kwd>
<kwd>image enhancement</kwd>
<kwd>image processing</kwd>
<kwd>machine learning</kwd>
<kwd>marine observation</kwd>
<kwd>object recognition</kwd>
</kwd-group>
<counts>
<fig-count count="0"/>
<table-count count="1"/>
<equation-count count="0"/>
<ref-count count="11"/>
<page-count count="4"/>
<word-count count="2275"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-in-acceptance</meta-name>
<meta-value>Ocean Observation</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<p>The aquatic ecosystem of the planet makes up a sizable 71% of its surface, which contains numerous living forms and an abundance of organic and inorganic resources throughout this enormous area (<xref ref-type="bibr" rid="B3">Issac and Kandasubramanian, 2021</xref>). Scientists and researchers have long been enthralled by the immense and enigmatic expanse of the marine ecosystems. The ocean&#x2019;s intricate ecosystems, diverse marine life, and the profound impact they have on our planet make understanding and monitoring these environments crucial (<xref ref-type="bibr" rid="B4">Maximenko et&#xa0;al., 2019</xref>). Both anthropogenic and natural activities have significantly increased recently, causing ecological problems in the marine environment (<xref ref-type="bibr" rid="B2">Huang et&#xa0;al., 2023</xref>). To successfully address and mitigate the resulting ecological damage, these disturbances call for the development of quick monitoring and mitigation mechanisms. As a result, the scientific community has been forced to explore numerous routes to push the limits of marine observation.</p>
<p>Underwater ecosystems have been mostly shrouded in darkness due to light attenuation, hindering comprehensive observation and data collection. But improvements in optics have fundamentally altered our capacity to perceive the underwater environment. Advances in high-resolution image capture, video recording, and spectral data acquisition have been made possible by cutting-edge imaging technology like underwater cameras, spectrometers, and hyperspectral sensors (<xref ref-type="bibr" rid="B9">Song et&#xa0;al., 2021a</xref>; <xref ref-type="bibr" rid="B6">Shahani et&#xa0;al., 2021</xref>). Through the study of species&#x2019; behavior, distribution, and interactions, hidden ecosystems are revealed and scientists are able to explore marine habitats in new detail.</p>
<p>Automated analysis of underwater imagery has been made possible by machine vision techniques used in conjunction with optics. Computers can now extract complex traits and accurately categorize marine organisms thanks to deep learning techniques, a subset of machine learning that has revolutionized image processing and pattern identification. There are many new possibilities for marine surveillance now that machine vision systems, optics, and deep learning approaches have been combined. Automation, data analysis, and real-time monitoring are just a few advantages that machine vision and deep learning algorithms together offer. The topic of marine species tracking and identification is one of the most notable applications (<xref ref-type="bibr" rid="B1">Chuang et&#xa0;al., 2016</xref>). Massive volumes of underwater imagery may be quickly analyzed using deep learning algorithms, which can then accurately and automatically identify and classify aquatic organisms (<xref ref-type="bibr" rid="B8">Song et&#xa0;al., 2020</xref>; <xref ref-type="bibr" rid="B10">Song et&#xa0;al., 2021b</xref>). These developments are essential for following migration patterns, evaluating the health of marine populations, and spotting possible threats to biodiversity. Machine vision and deep learning speed up research efforts by reducing the time-consuming and labor-intensive process of manual identification, enabling scientists to make educated conclusions about conservation measures and policy-making.</p>
<p>In conjunction with machine vision algorithms, remote sensing systems can monitor changes in ocean currents, sea surface temperature, and the spread of dangerous algal blooms (<xref ref-type="bibr" rid="B7">Son et&#xa0;al., 2015</xref>). For studying climate patterns, predicting weather occurrences, and reducing the possible effects of natural disasters on coastal communities, these real-time measurements are crucial. Additionally, the monitoring of human activities and their effects on marine habitats is made easier by the integration of optics, machine vision, and deep learning. Machine vision systems can monitor and identify potential pollution, illicit fishing, and habitat devastation (<xref ref-type="bibr" rid="B5">Mehdi et&#xa0;al., 2022</xref>; <xref ref-type="bibr" rid="B11">Yasir et&#xa0;al., 2023</xref>).</p>
<p>Understanding and keeping a close eye on the dynamics and health of oceans depends heavily on marine observation. We can employ machine vision, which focuses on creating algorithms and systems for understanding visual data, and optics, which deals with the study and manipulation of light, to better observe and understand marine ecosystems. For this purpose, the Research Topic &#x201c;<italic>Optics and machine vision for marine observation</italic>&#x201d; aims to explore the intersection of optics, machine vision, and deep learning technologies and their applications in making the field of marine observation more effective. It provides a collection of recent findings, developments, and innovative strategies related to underwater sensors, imaging systems, computer vision algorithms, and data analysis techniques that leverage optics and machine vision technologies for various aspects of marine observation. The Research Topic explores the transformative potential of optics and machine vision and their applications in contributing to the advancements of marine observation systems. The Research Topic comprises 24 articles, collectively representing the contribution of 118 authors (<xref ref-type="table" rid="T1">
<bold>Table&#xa0;1</bold>
</xref>).</p>
<table-wrap id="T1" position="float">
<label>Table&#xa0;1</label>
<caption>
<p>Summary of chapters published in this Research Topic.</p>
</caption>
<table frame="hsides">
<thead>
<tr>
<th valign="middle" align="left">DOI</th>
<th valign="middle" align="left">Title</th>
<th valign="middle" align="left">Keywords</th>
<th valign="middle" align="left">Authors</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.922669</td>
<td valign="middle" align="left">Submarine optical fiber sensing system for the real-time monitoring of depth, vibration, and temperature</td>
<td valign="middle" align="left">optical fiber sensing, fiber Bragg grating, submarine real-time monitoring, environmental monitoring, optical fiber sensor</td>
<td valign="middle" align="left">Liu Z, Zhang S, Yang C, Chung W-H and Li Z</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1003568</td>
<td valign="middle" align="left">PSS-net: Parallel semantic segmentation network for detecting marine animals in underwater scene</td>
<td valign="middle" align="left">detecting marine animal, underwater scene, protective colors, PSS-net, attention technique</td>
<td valign="middle" align="left">Kim YH and Park KR</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1010565</td>
<td valign="middle" align="left">Robust segmentation of underwater fish based on multi-level feature accumulation</td>
<td valign="middle" align="left">artificial intelligence, marine environment, underwater computer vision, fish segmentation, EFS-net and MFAS-net</td>
<td valign="middle" align="left">Haider A, Arsalan M, Choi J, Sultan H and Park KR</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1024339</td>
<td valign="middle" align="left">Underwater image restoration through regularization of coherent structures</td>
<td valign="middle" align="left">underwater images, image restoration, robust regularization, coherent structures, optimization problem</td>
<td valign="middle" align="left">Ali U and Mahmood MT</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1047053</td>
<td valign="middle" align="left">Single underwater image enhancement based on differential attenuation compensation</td>
<td valign="middle" align="left">underwater image, image enhancement, contrast stretching, differential attenuation compensation, machine vision</td>
<td valign="middle" align="left">Lai Y, Zhou Z, Su B, Xuanyuan Z, Tang J, Yan J, Liang W and Chen J</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1030113</td>
<td valign="middle" align="left">RACE-SM: Reliability and adaptive cooperation for efficient UWSNs using sink mobility</td>
<td valign="middle" align="left">UWSNs, energy efficient routing, routing protocols, sink mobility, cooperative routing</td>
<td valign="middle" align="left">Ahmad I, Rahman T, Khan I, Jan S, Musa S and Uddin MI</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1056300</td>
<td valign="middle" align="left">Underwater object detection algorithm based on attention mechanism and cross-stage partial fast spatial pyramidal pooling</td>
<td valign="middle" align="left">Underwater Object detection, ACFP-YOLO, YOLOv7, attention, SPPFCSPC</td>
<td valign="middle" align="left">Yan J, Zhou Z, Zhou D, Su B, Xuanyuan Z, Tang J, Lai Y, Chen J and Liang W</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1031549</td>
<td valign="middle" align="left">UCRNet: Underwater color image restoration <italic>via</italic> a polarization-guided convolutional neural network</td>
<td valign="middle" align="left">Polarization, polarimetric imaging, scattering media, imaging recovery, physical imaging</td>
<td valign="middle" align="left">Hu H, Huang Y, Li X, Jiang L, Che L, Liu T and Zhai J</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1032287</td>
<td valign="middle" align="left">RMP-Net: A structural reparameterization and subpixel super-resolution-based marine scene segmentation network</td>
<td valign="middle" align="left">submarine exploration, underwater scene, RMP-Net, structural re-parameterization, multiscale fusion</td>
<td valign="middle" align="left">Chen J, Tang J, Lin S, Liang W, Su B, Yan J, Zhou D, Wang L, Lai Y and Yang B</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1071618</td>
<td valign="middle" align="left">UMOTMA: Underwater multiple object tracking with memory aggregation</td>
<td valign="middle" align="left">artificial intelligence, underwater multiple object tracking, marine environment, long-short term memory, vision transformer</td>
<td valign="middle" align="left">Hao Z, Qiu J, Zhang H, Ren G and Liu C</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1039898</td>
<td valign="middle" align="left">A dual stream hierarchical transformer for starvation grading of golden pomfret in marine aquaculture</td>
<td valign="middle" align="left">neural network, transformer, starvation grading, marine image processing, behavior recognition</td>
<td valign="middle" align="left">Zheng K, Yang R, Li R, Yang L, Qin H and Li Z</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1073615</td>
<td valign="middle" align="left">Multiscale attention-based detection of tiny targets in aerial beach images</td>
<td valign="middle" align="left">tiny object detection, multiscale attention, feature pyramid network, attention mechanism, unmanned aerial vehicle</td>
<td valign="middle" align="left">Gao S, Liu C, Zhang H, Zhou Z and Qiu J</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1058201</td>
<td valign="middle" align="left">The analysis and design of deep-sea lighting field based on spectral transfer function</td>
<td valign="middle" align="left">underwater optics, underwater imaging, underwater lighting, marine observation, object recognition</td>
<td valign="middle" align="left">Quan X, Wei Y, Liu K and Li B</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1094915</td>
<td valign="middle" align="left">Deep learning-based marine big data fusion for ocean environment monitoring: Towards shape optimization and salient objects detection</td>
<td valign="middle" align="left">data fusion, marine big data, ocean environment, underwater saliency detection, underwater image processing</td>
<td valign="middle" align="left">Khan S, Ullah I, Ali F, Shafiq M, Ghadi YY and Kim T</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1086140</td>
<td valign="middle" align="left">Multi-scale ship target detection using SAR images based on improved Yolov5</td>
<td valign="middle" align="left">synthetic aperture radar (SAR), ship identification, artificial intelligence, deep learning (DL), YOLOv5S, SAR ship detection dataset (SSDD), AirSARship</td>
<td valign="middle" align="left">Yasir M, Shanwei L, Mingming X, Hui S, Hossain MS, Colak ATI, Wang D, Jianhua W and Dang KB</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2022.1058019</td>
<td valign="middle" align="left">Single underwater image enhancement based on adaptive correction of channel differential and fusion</td>
<td valign="middle" align="left">underwater image processing, image enhancement, histogram stretching, color correction, fusion</td>
<td valign="middle" align="left">Zhao Z, Zhou Z, Lai Y, Wang T, Zou S, Cai H and Xie H</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2023.1074428</td>
<td valign="middle" align="left">Deep focus-extended darkfield imaging for <italic>in situ</italic> observation of marine plankton</td>
<td valign="middle" align="left">underwater imaging, deep learning, focus extension, ocean observation, marine plankton</td>
<td valign="middle" align="left">Chen T, Li J, Ma W, Guo G, Yang Z, Li Z and Qiao J</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2023.1117787</td>
<td valign="middle" align="left">Energy-efficient clustering protocol for underwater wireless sensor networks using optimized glowworm swarm optimization</td>
<td valign="middle" align="left">UWSN, energy, GSO, routing, clustering, bio-inspired</td>
<td valign="middle" align="left">Bharany S, Sharma S, Alsharabi N, Tag Eldin E and Ghamry NA</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2023.1135058</td>
<td valign="middle" align="left">Laser-induced breakdown spectroscopy instrument and spectral analysis for deep-ocean Fe-Mn crusts</td>
<td valign="middle" align="left">deep ocean, marine spectral analysis, marine resource exploration, laser-induced breakdown spectroscopy, marine Fe-Mn crusts, parameter optimization</td>
<td valign="middle" align="left">Yang G, Chen G, Cai Z, Quan X and Zhu Y</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2023.1031869</td>
<td valign="middle" align="left">Investigating the rate of turbidity impact on underwater spectral reflectance detection</td>
<td valign="middle" align="left">spectral reflectance, spectral imaging, turbidity, spectral detection, liquid crystal tunable filters, spectral features, underwater scattering</td>
<td valign="middle" align="left">Song H, Mehdi SR, Li Z, Wang M, Wu C, Venediktov VY and Huang H</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2023.1135356</td>
<td valign="middle" align="left">Identification of marine oil spill pollution using hyperspectral combined with thermal infrared remote sensing</td>
<td valign="middle" align="left">marine oil spills, hyperspectral remote sensing, thermal infrared remote sensing, oil pollution types identification, deep learning</td>
<td valign="middle" align="left">Yang J, Hu Y, Zhang J, Ma Y, Li Z and Jiang Z</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2023.1124185</td>
<td valign="middle" align="left">Real-time detection of deep-sea hydrothermal plume based on machine vision and deep learning</td>
<td valign="middle" align="left">hydrothermal plume, deep-sea, real-time, object detection, deep learning, transfer learning, YOLOv5</td>
<td valign="middle" align="left">Wang X, Cao Y, Wu S and Yang C</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2023.1138013</td>
<td valign="middle" align="left">An underwater image enhancement model for domain adaptation</td>
<td valign="middle" align="left">underwater image, image enhancement, underwater dataset, domain adaptation, deep learning</td>
<td valign="middle" align="left">Deng X, Liu T, He S, Xiao X, Li P and Gu Y</td>
</tr>
<tr>
<td valign="middle" align="left">10.3389/fmars.2023.1167191</td>
<td valign="middle" align="left">An early warning model for starfish disaster based on multi-sensor fusion</td>
<td valign="middle" align="left">starfish disaster, multi-sensor fusion, early-warning model, self-supervised model, feature selection</td>
<td valign="middle" align="left">Li L, Liu T, Huang H, Song H, He S, Li P, Gu Y and Chen J</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>A wide domain of research is involved in the development and implementation of optics and machine vision for marine observation, including optical sensors and monitoring systems, image processing, deep learning techniques, deep-sea illumination, spectral image analysis, etc. Several researchers address the development of underwater monitoring methods based on optical fiber sensing for real-time study of environmental parameters (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.922669">Liu et&#xa0;al.</ext-link>), and water quality observation based on multi-sensor fusion for early warning of starfish disaster (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2023.1167191">Li et&#xa0;al.</ext-link>). Two studies discuss underwater sensor networks and protocols for exploration of underwater resources through efficient data collection (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1030113">Ahmad et&#xa0;al.</ext-link>; <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2023.1117787">Bharany et&#xa0;al.</ext-link>). Numerous papers deliver improved techniques and applications of deep learning for underwater object detection, while several studies concentrate heavily on underwater image enhancement in support of algorithm development, validation, and verification. 
Multiple papers explore the applications of deep learning for underwater object detection (fish classes, and organic and inorganic submarine objects: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1056300">Yan et&#xa0;al.</ext-link>; <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1094915">Khan et&#xa0;al.</ext-link>; hydrothermal plumes detection: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2023.1124185">Wang et&#xa0;al.</ext-link>) and image segmentation (fish: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1003568">Kim and Park</ext-link>; <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1010565">Haider et&#xa0;al.</ext-link>; <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1032287">Chen, J. et&#xa0;al.</ext-link>). One study proposes an advanced trajectory tracking mechanism for underwater fish classes including multi-object detection (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1071618">Hao et&#xa0;al.</ext-link>). Another study proposes and assesses a starvation grading model for fish class based on image processing and CNN that can benefit the field of fisheries (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1039898">Zheng et&#xa0;al.</ext-link>). For aerial-based monitoring of coastal areas, a paper suggests small size objects detection technique based on CNN (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1073615">Gao et&#xa0;al.</ext-link>). 
Papers based on spectral technologies address a range of topics including deep-sea illumination to compensate light attenuation (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1058201">Quan et&#xa0;al.</ext-link>), effects of turbidity on spectral imaging (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2023.1031869">Song et&#xa0;al.</ext-link>), and spectral imaging based deep-sea mineral exploration (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2023.1135058">Yang, G. et&#xa0;al.</ext-link>). In the field of marine observation, remote sensing provides valuable insights into the state of marine environment. Two papers contributed to the field of ocean remote sensing using hyperspectral imaging and CNNs for the detection of ships (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1086140">Yasir et&#xa0;al.</ext-link>), and the classification of oil spills (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2023.1135356">Yang, J. et&#xa0;al.</ext-link>). Several contributions in the field of underwater image processing include image restoration (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1024339">Ali and Mahmood</ext-link>; color restoration: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1031549">Hu et&#xa0;al.</ext-link>), and image enhancement (<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1047053">Lai et&#xa0;al.</ext-link>; <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2022.1058019">Zhao et&#xa0;al.</ext-link>; <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2023.1074428">Chen, T. et&#xa0;al.</ext-link>; <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fmars.2023.1138013">Deng et&#xa0;al.</ext-link>).</p>
<p>The ability to monitor and understand the marine environment has changed dramatically as a result of the merging of optics and machine vision technologies with marine research. These developments have given scientists the tools they need to solve the urgent ecological problems that are being caused by both natural events and human activity. These potent tools can be used by researchers to gain insightful knowledge of the marine ecosystem, facilitating well-informed decision-making and efficient mitigation measures. As a result, the limits of scientific understanding in marine science are being widely pushed, advancing our comprehension of this complex field to unprecedented heights.</p>
<sec id="s1" sec-type="author-contributions">
<title>Author contributions</title>
<p>HS: Data curation, Investigation, Supervision, Writing &#x2013; original draft, Writing &#x2013; review &amp; editing. SM: Data curation, Investigation, Writing &#x2013; original draft, Writing &#x2013; review &amp; editing. MW: Data curation, Investigation, Writing &#x2013; original draft, Writing &#x2013; review &amp; editing. RL: Data curation, Investigation, Writing &#x2013; original draft, Writing &#x2013; review &amp; editing. RN: Data curation, Investigation, Writing &#x2013; original draft, Writing &#x2013; review &amp; editing. SX: Data curation, Investigation, Writing &#x2013; original draft, Writing &#x2013; review &amp; editing.</p>
</sec>
</body>
<back>
<ack>
<title>Acknowledgments</title>
<p>We extend our appreciation to the researchers and scientists who provided this Research Topic with their insightful knowledge. We also express our sincere gratitude to the devoted reviewers for their meticulous consideration and constructive criticism. Their efforts were crucial in determining the success of this Research Topic.</p>
</ack>
<sec id="s2" sec-type="COI-statement">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec id="s3" sec-type="disclaimer">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chuang</surname> <given-names>M.-C.</given-names>
</name>
<name>
<surname>Hwang</surname> <given-names>J.-N.</given-names>
</name>
<name>
<surname>Ye</surname> <given-names>J.-H.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>S.-C.</given-names>
</name>
<name>
<surname>Williams</surname> <given-names>K.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Underwater fish tracking for moving cameras based on deformable multiple kernels</article-title>. <source>IEEE Transactions on Systems, Man, and Cybernetics: Systems</source> <volume>47</volume>, <fpage>2467</fpage>&#x2013;<lpage>2477</lpage>.</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Huang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Cai</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Qureshi</surname> <given-names>J. U.</given-names>
</name>
<name>
<surname>Mehdi</surname> <given-names>S. R.</given-names>
</name>
<name>
<surname>Song</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>C.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Proceeding the categorization of microplastics through deep learning-based image segmentation</article-title>. <source>Sci. Total Environ.</source> <volume>896</volume>, <fpage>165308</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.scitotenv.2023.165308</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Issac</surname> <given-names>M. N.</given-names>
</name>
<name>
<surname>Kandasubramanian</surname> <given-names>B.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Effect of microplastics in water and aquatic systems</article-title>. <source>Environ. Sci. Pollut. Res.</source> <volume>28</volume>, <fpage>19544</fpage>&#x2013;<lpage>19562</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s11356-021-13184-2</pub-id>
</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Maximenko</surname> <given-names>N.</given-names>
</name>
<name>
<surname>Corradi</surname> <given-names>P.</given-names>
</name>
<name>
<surname>Law</surname> <given-names>K. L.</given-names>
</name>
<name>
<surname>Van Sebille</surname> <given-names>E.</given-names>
</name>
<name>
<surname>Garaba</surname> <given-names>S. P.</given-names>
</name>
<name>
<surname>Lampitt</surname> <given-names>R. S.</given-names>
</name>
<etal/>
</person-group>. (<year>2019</year>). <article-title>Toward the integrated marine debris observing system</article-title>. <source>Front. Mar. Sci.</source> <volume>6</volume>, <elocation-id>447</elocation-id>. doi: <pub-id pub-id-type="doi">10.3389/fmars.2019.00447</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mehdi</surname> <given-names>S. R.</given-names>
</name>
<name>
<surname>Raza</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Naqvi</surname> <given-names>R. A.</given-names>
</name>
<name>
<surname>Ali</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Song</surname> <given-names>H.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Combining deep learning with single-spectrum UV imaging for rapid detection of HNSs spills</article-title>. <source>Remote Sens.</source> <volume>14</volume> (<issue>3</issue>), <fpage>576</fpage>. doi: <pub-id pub-id-type="doi">10.3390/rs14030576</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Shahani</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Song</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Mehdi</surname> <given-names>S. R.</given-names>
</name>
<name>
<surname>Sharma</surname> <given-names>A.</given-names>
</name>
<name>
<surname>Tunio</surname> <given-names>G.</given-names>
</name>
<name>
<surname>Qureshi</surname> <given-names>J.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>). <article-title>Design and testing of an underwater microscope with variable objective lens for the study of benthic communities</article-title>. <source>J. Mar. Sci. Appl.</source> <volume>20</volume>, <fpage>170</fpage>&#x2013;<lpage>178</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s11804-020-00185-9</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Son</surname> <given-names>Y. B.</given-names>
</name>
<name>
<surname>Choi</surname> <given-names>B.-J.</given-names>
</name>
<name>
<surname>Kim</surname> <given-names>Y. H.</given-names>
</name>
<name>
<surname>Park</surname> <given-names>Y.-G.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Tracing floating green algae blooms in the Yellow Sea and the East China Sea using GOCI satellite data and Lagrangian transport simulations</article-title>. <source>Remote Sens. Environ.</source> <volume>156</volume>, <fpage>21</fpage>&#x2013;<lpage>33</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.rse.2014.09.024</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Song</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Mehdi</surname> <given-names>S. R.</given-names>
</name>
<name>
<surname>Huang</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Shahani</surname> <given-names>K.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Raza</surname> <given-names>K.</given-names>
</name>
<etal/>
</person-group>. (<year>2020</year>). <article-title>Classification of freshwater zooplankton by pre-trained convolutional neural network in underwater microscopy</article-title>. <source>Int. J. Adv. Comput. Sci. Appl.</source> <volume>11</volume> (<issue>7</issue>), <fpage>252</fpage>&#x2013;<lpage>258</lpage>. doi: <pub-id pub-id-type="doi">10.14569/IJACSA.2020.0110733</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Song</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Mehdi</surname> <given-names>S. R.</given-names>
</name>
<name>
<surname>Wu</surname> <given-names>C.</given-names>
</name>
<name>
<surname>Li</surname> <given-names>Z.</given-names>
</name>
<name>
<surname>Gong</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Ali</surname> <given-names>A.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>a). <article-title>Underwater spectral imaging system based on liquid crystal tunable filter</article-title>. <source>J. Mar. Sci. Eng.</source> <volume>9</volume> (<issue>11</issue>), <fpage>1206</fpage>. doi: <pub-id pub-id-type="doi">10.3390/jmse9111206</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Song</surname> <given-names>H.</given-names>
</name>
<name>
<surname>Mehdi</surname> <given-names>S. R.</given-names>
</name>
<name>
<surname>Zhang</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Shentu</surname> <given-names>Y.</given-names>
</name>
<name>
<surname>Wan</surname> <given-names>Q.</given-names>
</name>
<name>
<surname>Wang</surname> <given-names>W.</given-names>
</name>
<etal/>
</person-group>. (<year>2021</year>b). <article-title>Development of coral investigation system based on semantic segmentation of single-channel images</article-title>. <source>Sensors</source> <volume>21</volume> (<issue>5</issue>), <fpage>1848</fpage>. doi: <pub-id pub-id-type="doi">10.3390/s21051848</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yasir</surname> <given-names>M.</given-names>
</name>
<name>
<surname>Zhan</surname> <given-names>L.</given-names>
</name>
<name>
<surname>Liu</surname> <given-names>S.</given-names>
</name>
<name>
<surname>Wan</surname> <given-names>J.</given-names>
</name>
<name>
<surname>Hossain</surname> <given-names>Md S.</given-names>
</name>
<name>
<surname>Colak</surname> <given-names>A. T. I.</given-names>
</name>
<etal/>
</person-group>. (<year>2023</year>). <article-title>Instance segmentation ship detection based on improved Yolov7 using complex background SAR images</article-title>. <source>Front. Mar. Sci.</source> <volume>10</volume>, <elocation-id>1113669</elocation-id>. doi: <pub-id pub-id-type="doi">10.3389/fmars.2023.1113669</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>