<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article
PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.4 20190208//EN"
       "JATS-journalpublishing1.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="1.4" xml:lang="en">
 <front>
  <journal-meta>
   <journal-id journal-id-type="publisher-id">Foods and Raw Materials</journal-id>
   <journal-title-group>
    <journal-title xml:lang="en">Foods and Raw Materials</journal-title>
    <trans-title-group xml:lang="ru">
     <trans-title>Foods and Raw Materials</trans-title>
    </trans-title-group>
   </journal-title-group>
   <issn publication-format="print">2308-4057</issn>
   <issn publication-format="online">2310-9599</issn>
  </journal-meta>
  <article-meta>
   <article-id pub-id-type="publisher-id">89720</article-id>
   <article-id pub-id-type="doi">10.21603/2308-4057-2025-2-650</article-id>
   <article-categories>
    <subj-group subj-group-type="toc-heading" xml:lang="ru">
     <subject>Research Article</subject>
    </subj-group>
    <subj-group subj-group-type="toc-heading" xml:lang="en">
     <subject>Research Article</subject>
    </subj-group>
    <subj-group>
     <subject>Research Article</subject>
    </subj-group>
   </article-categories>
   <title-group>
    <article-title xml:lang="en">UAV imagery, advanced deep learning, and YOLOv7 object detection model in enhancing citrus yield estimation</article-title>
    <trans-title-group xml:lang="ru">
     <trans-title>UAV imagery, advanced deep learning, and YOLOv7 object detection model in enhancing citrus yield estimation</trans-title>
    </trans-title-group>
   </title-group>
   <contrib-group content-type="authors">
    <contrib contrib-type="author">
     <contrib-id contrib-id-type="orcid">https://orcid.org/0009-0006-5525-7956</contrib-id>
      <name-alternatives>
       <name xml:lang="ru">
        <surname>Daiaeddine</surname>
        <given-names>Mohamed Jibril</given-names>
       </name>
       <name xml:lang="en">
        <surname>Daiaeddine</surname>
        <given-names>Mohamed Jibril</given-names>
       </name>
      </name-alternatives>
     <email>mohamedjibril.daiaeddine@usms.ma</email>
     <xref ref-type="aff" rid="aff-1"/>
    </contrib>
    <contrib contrib-type="author">
     <contrib-id contrib-id-type="orcid">https://orcid.org/0009-0000-2675-4810</contrib-id>
      <name-alternatives>
       <name xml:lang="ru">
        <surname>Badrouss</surname>
        <given-names>Sara</given-names>
       </name>
       <name xml:lang="en">
        <surname>Badrouss</surname>
        <given-names>Sara</given-names>
       </name>
      </name-alternatives>
     <xref ref-type="aff" rid="aff-2"/>
    </contrib>
    <contrib contrib-type="author">
     <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-3976-4588</contrib-id>
      <name-alternatives>
       <name xml:lang="ru">
        <surname>El Harti</surname>
        <given-names>Abderrazak</given-names>
       </name>
       <name xml:lang="en">
        <surname>El Harti</surname>
        <given-names>Abderrazak</given-names>
       </name>
      </name-alternatives>
     <xref ref-type="aff" rid="aff-3"/>
    </contrib>
    <contrib contrib-type="author">
     <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-4163-6307</contrib-id>
      <name-alternatives>
       <name xml:lang="ru">
        <surname>Bachaoui</surname>
        <given-names>El Mostafa</given-names>
       </name>
       <name xml:lang="en">
        <surname>Bachaoui</surname>
        <given-names>El Mostafa</given-names>
       </name>
      </name-alternatives>
     <xref ref-type="aff" rid="aff-4"/>
    </contrib>
    <contrib contrib-type="author">
     <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-9448-6165</contrib-id>
      <name-alternatives>
       <name xml:lang="ru">
        <surname>Biniz</surname>
        <given-names>Mohamed</given-names>
       </name>
       <name xml:lang="en">
        <surname>Biniz</surname>
        <given-names>Mohamed</given-names>
       </name>
      </name-alternatives>
     <xref ref-type="aff" rid="aff-5"/>
    </contrib>
    <contrib contrib-type="author">
     <contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-3312-8230</contrib-id>
      <name-alternatives>
       <name xml:lang="ru">
        <surname>Mouncif</surname>
        <given-names>Hicham</given-names>
       </name>
       <name xml:lang="en">
        <surname>Mouncif</surname>
        <given-names>Hicham</given-names>
       </name>
      </name-alternatives>
     <xref ref-type="aff" rid="aff-6"/>
    </contrib>
   </contrib-group>
   <aff-alternatives id="aff-1">
    <aff>
     <institution xml:lang="ru">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Марокко</country>
    </aff>
    <aff>
     <institution xml:lang="en">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Morocco</country>
    </aff>
   </aff-alternatives>
   <aff-alternatives id="aff-2">
    <aff>
     <institution xml:lang="ru">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Марокко</country>
    </aff>
    <aff>
     <institution xml:lang="en">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Morocco</country>
    </aff>
   </aff-alternatives>
   <aff-alternatives id="aff-3">
    <aff>
     <institution xml:lang="ru">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Марокко</country>
    </aff>
    <aff>
     <institution xml:lang="en">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Morocco</country>
    </aff>
   </aff-alternatives>
   <aff-alternatives id="aff-4">
    <aff>
     <institution xml:lang="ru">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Марокко</country>
    </aff>
    <aff>
     <institution xml:lang="en">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Morocco</country>
    </aff>
   </aff-alternatives>
   <aff-alternatives id="aff-5">
    <aff>
     <institution xml:lang="ru">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Марокко</country>
    </aff>
    <aff>
     <institution xml:lang="en">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Morocco</country>
    </aff>
   </aff-alternatives>
   <aff-alternatives id="aff-6">
    <aff>
     <institution xml:lang="ru">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Марокко</country>
    </aff>
    <aff>
     <institution xml:lang="en">Sultan Moulay Slimane University</institution>
     <city>Beni Mellal</city>
     <country>Morocco</country>
    </aff>
   </aff-alternatives>
   <pub-date publication-format="print" date-type="pub" iso-8601-date="2025-01-01T00:00:00+03:00">
    <day>01</day>
    <month>01</month>
    <year>2025</year>
   </pub-date>
   <pub-date publication-format="electronic" date-type="pub" iso-8601-date="2025-01-01T00:00:00+03:00">
    <day>01</day>
    <month>01</month>
    <year>2025</year>
   </pub-date>
   <volume>13</volume>
   <issue>2</issue>
   <fpage>242</fpage>
   <lpage>253</lpage>
   <history>
    <date date-type="received" iso-8601-date="2024-01-04T00:00:00+03:00">
     <day>04</day>
     <month>01</month>
     <year>2024</year>
    </date>
    <date date-type="accepted" iso-8601-date="2024-05-07T00:00:00+03:00">
     <day>07</day>
     <month>05</month>
     <year>2024</year>
    </date>
   </history>
   <self-uri xlink:href="https://jfrm.ru/en/issues/22898/22915/">https://jfrm.ru/en/issues/22898/22915/</self-uri>
   <abstract xml:lang="ru">
     <p>Accurate citrus fruit yield estimation is of utmost importance for precise agricultural management. Unmanned aerial vehicle (UAV) remote-sensing systems present a compelling solution to this problem. These systems capture remote-sensing imagery with both high temporal and spatial resolution, thus empowering farmers with valuable insights for better decision-making. This research assessed the potential application of UAV imagery combined with the YOLOv7 object detection model for the precise estimation of citrus yield.&#13;
Images of citrus trees were captured in their natural field setting using a quadcopter-mounted UAV camera. Data augmentation techniques were applied to enhance the dataset diversity; the original YOLOv7 architecture and training parameters were modified to improve the model’s accuracy in detecting citrus fruits.&#13;
The test results demonstrated commendable performance, with a precision of 96%, a recall of 100%, and an F1-score of 97.95%. The correlation between the fruit numbers recognized by the algorithm and the actual fruit numbers from 20 sample trees provided the coefficient R2 of 0.98.&#13;
The strong positive correlation confirmed both the accuracy of the algorithm and the validity of the approach in identifying and quantifying citrus fruits on sample trees.</p>
   </abstract>
   <trans-abstract xml:lang="en">
     <p>Accurate citrus fruit yield estimation is of utmost importance for precise agricultural management. Unmanned aerial vehicle (UAV) remote-sensing systems present a compelling solution to this problem. These systems capture remote-sensing imagery with both high temporal and spatial resolution, thus empowering farmers with valuable insights for better decision-making. This research assessed the potential application of UAV imagery combined with the YOLOv7 object detection model for the precise estimation of citrus yield.&#13;
Images of citrus trees were captured in their natural field setting using a quadcopter-mounted UAV camera. Data augmentation techniques were applied to enhance the dataset diversity; the original YOLOv7 architecture and training parameters were modified to improve the model’s accuracy in detecting citrus fruits.&#13;
The test results demonstrated commendable performance, with a precision of 96%, a recall of 100%, and an F1-score of 97.95%. The correlation between the fruit numbers recognized by the algorithm and the actual fruit numbers from 20 sample trees provided the coefficient R2 of 0.98.&#13;
The strong positive correlation confirmed both the accuracy of the algorithm and the validity of the approach in identifying and quantifying citrus fruits on sample trees.</p>
   </trans-abstract>
   <kwd-group xml:lang="ru">
    <kwd>Agricultural management</kwd>
    <kwd>unmanned aerial vehicle (UAV)</kwd>
    <kwd>remote-sensing systems</kwd>
    <kwd>YOLOv7 object detection model</kwd>
    <kwd>crop yield estimation</kwd>
   </kwd-group>
   <kwd-group xml:lang="en">
    <kwd>Agricultural management</kwd>
    <kwd>unmanned aerial vehicle (UAV)</kwd>
    <kwd>remote-sensing systems</kwd>
    <kwd>YOLOv7 object detection model</kwd>
    <kwd>crop yield estimation</kwd>
   </kwd-group>
  </article-meta>
 </front>
 <body>
  <p></p>
 </body>
 <back>
  <ref-list>
   <ref id="B1">
    <label>1.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Marani R, Milella A, Petitti A, Reina G. Deep neural networks for grape bunch segmentation in natural images from a consumer-grade camera. Precision Agriculture. 2021;22:387–413. https://doi.org/10.1007/s11119-020-09736-0</mixed-citation>
     <mixed-citation xml:lang="en">Marani R, Milella A, Petitti A, Reina G. Deep neural networks for grape bunch segmentation in natural images from a consumer-grade camera. Precision Agriculture. 2021;22:387–413. https://doi.org/10.1007/s11119-020-09736-0</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B2">
    <label>2.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Gongal A, Amatya S, Karkee M, Zhang Q, Lewis K. Sensors and systems for fruit detection and localization: A review. Computers and Electronics in Agriculture. 2015;116:8–19. https://doi.org/10.1016/j.compag.2015.05.021</mixed-citation>
     <mixed-citation xml:lang="en">Gongal A, Amatya S, Karkee M, Zhang Q, Lewis K. Sensors and systems for fruit detection and localization: A review. Computers and Electronics in Agriculture. 2015;116:8–19. https://doi.org/10.1016/j.compag.2015.05.021</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B3">
    <label>3.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Sengupta S, Lee WS. Identification and determination of the number of immature green citrus fruit in a canopy under different ambient light conditions. Biosystems Engineering. 2014;117:51–61. https://doi.org/10.1016/j.biosystemseng.2013.07.007</mixed-citation>
     <mixed-citation xml:lang="en">Sengupta S, Lee WS. Identification and determination of the number of immature green citrus fruit in a canopy under different ambient light conditions. Biosystems Engineering. 2014;117:51–61. https://doi.org/10.1016/j.biosystemseng.2013.07.007</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B4">
    <label>4.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Maldonado Jr W, Barbosa JC. Automatic green fruit counting in orange trees using digital images. Computers and Electronics in Agriculture. 2016;127:572–581. https://doi.org/10.1016/j.compag.2016.07.023</mixed-citation>
     <mixed-citation xml:lang="en">Maldonado Jr W, Barbosa JC. Automatic green fruit counting in orange trees using digital images. Computers and Electronics in Agriculture. 2016;127:572–581. https://doi.org/10.1016/j.compag.2016.07.023</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B5">
    <label>5.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Zhao C, Lee WS, He D. Immature green citrus detection based on colour feature and sum of absolute transformed difference (SATD) using colour images in the citrus grove. Computers and Electronics in Agriculture. 2016;124:243–253. https://doi.org/10.1016/j.compag.2016.04.009</mixed-citation>
     <mixed-citation xml:lang="en">Zhao C, Lee WS, He D. Immature green citrus detection based on colour feature and sum of absolute transformed difference (SATD) using colour images in the citrus grove. Computers and Electronics in Agriculture. 2016;124:243–253. https://doi.org/10.1016/j.compag.2016.04.009</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B6">
    <label>6.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Dorj U-O, Lee M, Yun S. An yield estimation in citrus orchards via fruit detection and counting using image processing. Computers and Electronics in Agriculture. 2017;140:103–112. https://doi.org/10.1016/j.compag.2017.05.019</mixed-citation>
     <mixed-citation xml:lang="en">Dorj U-O, Lee M, Yun S. An yield estimation in citrus orchards via fruit detection and counting using image processing. Computers and Electronics in Agriculture. 2017;140:103–112. https://doi.org/10.1016/j.compag.2017.05.019</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B7">
    <label>7.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Liu T-H, Ehsani R, Toudeshki A, Zou X-J, Wang H-J. Detection of citrus fruit and tree trunks in natural environments using a multi-elliptical boundary model. Computers in Industry. 2018;99:9–16. https://doi.org/10.1016/j.compind.2018.03.007</mixed-citation>
     <mixed-citation xml:lang="en">Liu T-H, Ehsani R, Toudeshki A, Zou X-J, Wang H-J. Detection of citrus fruit and tree trunks in natural environments using a multi-elliptical boundary model. Computers in Industry. 2018;99:9–16. https://doi.org/10.1016/j.compind.2018.03.007</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B8">
    <label>8.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Liu S, Yang C, Hu Y, Huang L, Xiong L. A method for segmentation and recognition of mature citrus and branches-leaves based on regional features. In: Wang Y, Jiang Z, Peng Y, editors. Image and graphics technologies and applications. Singapore: Springer; 2018. pp. 292–301. https://doi.org/10.1007/978-981-13-1702-6_29</mixed-citation>
     <mixed-citation xml:lang="en">Liu S, Yang C, Hu Y, Huang L, Xiong L. A method for segmentation and recognition of mature citrus and branches-leaves based on regional features. In: Wang Y, Jiang Z, Peng Y, editors. Image and graphics technologies and applications. Singapore: Springer; 2018. pp. 292–301. https://doi.org/10.1007/978-981-13-1702-6_29</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B9">
    <label>9.</label>
    <citation-alternatives>
      <mixed-citation xml:lang="ru">Xu L, Zhu S, Chen X, Wang Y, Kang Z, Huang P, et al. Citrus recognition in real scenarios based on machine vision. DYNA. Ingeniería e Industria. 2020;95(1):87–93. https://doi.org/10.6036/9363</mixed-citation>
      <mixed-citation xml:lang="en">Xu L, Zhu S, Chen X, Wang Y, Kang Z, Huang P, et al. Citrus recognition in real scenarios based on machine vision. DYNA. Ingeniería e Industria. 2020;95(1):87–93. https://doi.org/10.6036/9363</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B10">
    <label>10.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Zhang X, Toudeshki A, Ehsani R, Li H, Zhang W, Ma R. Yield estimation of citrus fruit using rapid image processing in natural background. Smart Agricultural Technology. 2022;2:100027. https://doi.org/10.1016/j.atech.2021.100027</mixed-citation>
     <mixed-citation xml:lang="en">Zhang X, Toudeshki A, Ehsani R, Li H, Zhang W, Ma R. Yield estimation of citrus fruit using rapid image processing in natural background. Smart Agricultural Technology. 2022;2:100027. https://doi.org/10.1016/j.atech.2021.100027</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B11">
    <label>11.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Maheswari P, Raja P, Apolo-Apolo OE, Perez-Ruiz M. Intelligent fruit yield estimation for orchards using deep learning based semantic segmentation techniques – A review. Frontiers in Plant Science. 2021;12:684328. https://doi.org/10.3389/fpls.2021.684328</mixed-citation>
     <mixed-citation xml:lang="en">Maheswari P, Raja P, Apolo-Apolo OE, Perez-Ruiz M. Intelligent fruit yield estimation for orchards using deep learning based semantic segmentation techniques – A review. Frontiers in Plant Science. 2021;12:684328. https://doi.org/10.3389/fpls.2021.684328</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B12">
    <label>12.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Yamamoto K, Guo W, Yoshioka Y, Ninomiya S. On plant detection of intact tomato fruits using image analysis and machine learning methods. Sensors. 2014;14(7):12191–12206. https://doi.org/10.3390/s140712191</mixed-citation>
     <mixed-citation xml:lang="en">Yamamoto K, Guo W, Yoshioka Y, Ninomiya S. On plant detection of intact tomato fruits using image analysis and machine learning methods. Sensors. 2014;14(7):12191–12206. https://doi.org/10.3390/s140712191</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B13">
    <label>13.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Loddo A, Loddo M, Di Ruberto C. A novel deep learning based approach for seed image classification and retrieval. Computers and Electronics in Agriculture. 2021;187:106269. https://doi.org/10.1016/j.compag.2021.106269</mixed-citation>
     <mixed-citation xml:lang="en">Loddo A, Loddo M, Di Ruberto C. A novel deep learning based approach for seed image classification and retrieval. Computers and Electronics in Agriculture. 2021;187:106269. https://doi.org/10.1016/j.compag.2021.106269</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B14">
    <label>14.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Han B-G, Lee J-G, Lim K-T, Choi D-H. Design of a scalable and fast YOLO for edge-computing devices. Sensors. 2020;20(23):6779. https://doi.org/10.3390/s20236779</mixed-citation>
     <mixed-citation xml:lang="en">Han B-G, Lee J-G, Lim K-T, Choi D-H. Design of a scalable and fast YOLO for edge-computing devices. Sensors. 2020;20(23):6779. https://doi.org/10.3390/s20236779</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B15">
    <label>15.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Sivakumar ANV, Li J, Scott S, Psota E, Jhala AJ, Luck JD, et al. Comparison of object detection and patch-based classification deep learning models on mid-to late-season weed detection in UAV imagery. Remote Sensing. 2020;12(13):2136. https://doi.org/10.3390/rs12132136</mixed-citation>
     <mixed-citation xml:lang="en">Sivakumar ANV, Li J, Scott S, Psota E, Jhala AJ, Luck JD, et al. Comparison of object detection and patch-based classification deep learning models on mid-to late-season weed detection in UAV imagery. Remote Sensing. 2020;12(13):2136. https://doi.org/10.3390/rs12132136</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B16">
    <label>16.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Liu W, Anguelov D, Erhan D, Szegedy C, Reed S, Fu C-Y, et al. SSD: Single shot multibox detector. In: Leibe B, Matas J, Sebe N, Welling M, editors. Computer Vision – ECCV 2016. Cham: Springer; 2016. pp. 21–37. https://doi.org/10.1007/978-3-319-46448-0_2</mixed-citation>
     <mixed-citation xml:lang="en">Liu W, Anguelov D, Erhan D, Szegedy C, Reed S, Fu C-Y, et al. SSD: Single shot multibox detector. In: Leibe B, Matas J, Sebe N, Welling M, editors. Computer Vision – ECCV 2016. Cham: Springer; 2016. pp. 21–37. https://doi.org/10.1007/978-3-319-46448-0_2</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B17">
    <label>17.</label>
    <citation-alternatives>
      <mixed-citation xml:lang="ru">Redmon J, Divvala S, Girshick R, Farhadi A. You only look once: Unified, real-time object detection. Proceedings of the 2016 IEEE Conference on Computer Vision and Pattern Recognition; 2016; Las Vegas. IEEE; 2016. pp. 779–788. https://doi.org/10.1109/CVPR.2016.91</mixed-citation>
      <mixed-citation xml:lang="en">Redmon J, Divvala S, Girshick R, Farhadi A. You only look once: Unified, real-time object detection. Proceedings of the 2016 IEEE Conference on Computer Vision and Pattern Recognition; 2016; Las Vegas. IEEE; 2016. pp. 779–788. https://doi.org/10.1109/CVPR.2016.91</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B18">
    <label>18.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Redmon J, Farhadi A. YOLO9000: better, faster, stronger. Proceedings of the 2017 IEEE Conference on Computer Vision and Pattern Recognition; 2017; Honolulu. IEEE; 2017. pp. 7263–7271. https://doi.org/10.1109/CVPR.2017.690</mixed-citation>
     <mixed-citation xml:lang="en">Redmon J, Farhadi A. YOLO9000: better, faster, stronger. Proceedings of the 2017 IEEE Conference on Computer Vision and Pattern Recognition; 2017; Honolulu. IEEE; 2017. pp. 7263–7271. https://doi.org/10.1109/CVPR.2017.690</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B19">
    <label>19.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Redmon J, Farhadi A. YOLOv3: An incremental improvement. 2018. https://doi.org/10.48550/arXiv.1804.02767</mixed-citation>
     <mixed-citation xml:lang="en">Redmon J, Farhadi A. YOLOv3: An incremental improvement. 2018. https://doi.org/10.48550/arXiv.1804.02767</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B20">
    <label>20.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Bochkovskiy A, Wang C-Y, Liao H-YM. YOLOv4: Optimal speed and accuracy of object detection. 2020. https://doi.org/10.48550/arXiv.2004.10934</mixed-citation>
     <mixed-citation xml:lang="en">Bochkovskiy A, Wang C-Y, Liao H-YM. YOLOv4: Optimal speed and accuracy of object detection. 2020. https://doi.org/10.48550/arXiv.2004.10934</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B21">
    <label>21.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Jocher G, Chaurasia A, Stoken A, Borovec J, Kwon Y, Fang J, et al. ultralytics/yolov5: v6.1 – TensorRT, TensorFlow edge TPU and OpenVINO export and inference. Zenodo. 2022. https://doi.org/10.5281/zenodo.6222936</mixed-citation>
     <mixed-citation xml:lang="en">Jocher G, Chaurasia A, Stoken A, Borovec J, Kwon Y, Fang J, et al. ultralytics/yolov5: v6.1 – TensorRT, TensorFlow edge TPU and OpenVINO export and inference. Zenodo. 2022. https://doi.org/10.5281/zenodo.6222936</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B22">
    <label>22.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Ge Z, Liu S, Wang F, Li Z, Sun J. YOLOX: Exceeding YOLO series in 2021. 2021. https://doi.org/10.48550/arXiv.2107.08430</mixed-citation>
     <mixed-citation xml:lang="en">Ge Z, Liu S, Wang F, Li Z, Sun J. YOLOX: Exceeding YOLO series in 2021. 2021. https://doi.org/10.48550/arXiv.2107.08430</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B23">
    <label>23.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Wang C-Y, Bochkovskiy A, Liao H-YM. YOLOv7: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors. Proceedings of the 2023 IEEE/CVF Conference on Computer Vision and Pattern Recognition; 2023; Vancouver. IEEE; 2023. pp. 7464–7475. https://doi.org/10.1109/CVPR52729.2023.00721</mixed-citation>
     <mixed-citation xml:lang="en">Wang C-Y, Bochkovskiy A, Liao H-YM. YOLOv7: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors. Proceedings of the 2023 IEEE/CVF Conference on Computer Vision and Pattern Recognition; 2023; Vancouver. IEEE; 2023. pp. 7464–7475. https://doi.org/10.1109/CVPR52729.2023.00721</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B24">
    <label>24.</label>
    <citation-alternatives>
      <mixed-citation xml:lang="ru">Girshick R. Fast R-CNN. Proceedings of the 2015 IEEE International Conference on Computer Vision; 2015; Santiago. IEEE; 2015. pp. 1440–1448. https://doi.org/10.1109/ICCV.2015.169</mixed-citation>
      <mixed-citation xml:lang="en">Girshick R. Fast R-CNN. Proceedings of the 2015 IEEE International Conference on Computer Vision; 2015; Santiago. IEEE; 2015. pp. 1440–1448. https://doi.org/10.1109/ICCV.2015.169</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B25">
    <label>25.</label>
    <citation-alternatives>
      <mixed-citation xml:lang="ru">Ren S, He K, Girshick R, Sun J. Faster R-CNN: Towards real-time object detection with region proposal networks. In: Cortes C, Lawrence N, Lee D, Sugiyama M, Garnett R, editors. Advances in neural information processing systems. Red Hook: Curran Associates; 2015.</mixed-citation>
      <mixed-citation xml:lang="en">Ren S, He K, Girshick R, Sun J. Faster R-CNN: Towards real-time object detection with region proposal networks. In: Cortes C, Lawrence N, Lee D, Sugiyama M, Garnett R, editors. Advances in neural information processing systems. Red Hook: Curran Associates; 2015.</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B26">
    <label>26.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Lucena F, Breunig FM, Kux H. The combined use of UAV-based RGB and DEM images for the detection and delineation of orange tree crowns with mask R-CNN: An approach of labeling and unified framework. Future Internet. 2022;14(10):275. https://doi.org/10.3390/fi14100275</mixed-citation>
     <mixed-citation xml:lang="en">Lucena F, Breunig FM, Kux H. The combined use of UAV-based RGB and DEM images for the detection and delineation of orange tree crowns with mask R-CNN: An approach of labeling and unified framework. Future Internet. 2022;14(10):275. https://doi.org/10.3390/fi14100275</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B27">
    <label>27.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Xu L, Wang Y, Shi X, Tang Z, Chen X, Wang Y, et al. Real-time and accurate detection of citrus in complex scenes based on HPL-YOLOv4. Computers and Electronics in Agriculture. 2023;205:107590. https://doi.org/10.1016/j.compag.2022.107590</mixed-citation>
     <mixed-citation xml:lang="en">Xu L, Wang Y, Shi X, Tang Z, Chen X, Wang Y, et al. Real-time and accurate detection of citrus in complex scenes based on HPL-YOLOv4. Computers and Electronics in Agriculture. 2023;205:107590. https://doi.org/10.1016/j.compag.2022.107590</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B28">
    <label>28.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Yang R, Hu Y, Yao Y, Gao M, Liu R. Fruit target detection based on BCo-YOLOv5 model. Mobile Information Systems. 2022;2022:8457173. https://doi.org/10.1155/2022/8457173</mixed-citation>
     <mixed-citation xml:lang="en">Yang R, Hu Y, Yao Y, Gao M, Liu R. Fruit target detection based on BCo-YOLOv5 model. Mobile Information Systems. 2022;2022:8457173. https://doi.org/10.1155/2022/8457173</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B29">
    <label>29.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Lai Y, Ma R, Chen Y, Wan T, Jiao R, He H. A pineapple target detection method in a field environment based on improved YOLOv7. Applied Sciences. 2023;13(4):2691. https://doi.org/10.3390/app13042691</mixed-citation>
     <mixed-citation xml:lang="en">Lai Y, Ma R, Chen Y, Wan T, Jiao R, He H. A pineapple target detection method in a field environment based on improved YOLOv7. Applied Sciences. 2023;13(4):2691. https://doi.org/10.3390/app13042691</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B30">
    <label>30.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Chen J, Liu H, Zhang Y, Zhang D, Ouyang H, Chen X. A multiscale lightweight and efficient model based on YOLOv7: Applied to citrus orchard. Plants. 2022;11(23):3260. https://doi.org/10.3390/plants11233260</mixed-citation>
     <mixed-citation xml:lang="en">Chen J, Liu H, Zhang Y, Zhang D, Ouyang H, Chen X. A multiscale lightweight and efficient model based on YOLOv7: Applied to citrus orchard. Plants. 2022;11(23):3260. https://doi.org/10.3390/plants11233260</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B31">
    <label>31.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Yang H, Liu Y, Wang S, Qu H, Li N, Wu J, et al. Improved apple fruit target recognition method based on YOLOv7 model. Agriculture. 2023;13(7):1278. https://doi.org/10.3390/agriculture13071278</mixed-citation>
     <mixed-citation xml:lang="en">Yang H, Liu Y, Wang S, Qu H, Li N, Wu J, et al. Improved apple fruit target recognition method based on YOLOv7 model. Agriculture. 2023;13(7):1278. https://doi.org/10.3390/agriculture13071278</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B32">
    <label>32.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Ministry of Agriculture. https://www.agriculture.gov.ma</mixed-citation>
     <mixed-citation xml:lang="en">Ministry of Agriculture. https://www.agriculture.gov.ma</mixed-citation>
    </citation-alternatives>
   </ref>
   <ref id="B33">
    <label>33.</label>
    <citation-alternatives>
     <mixed-citation xml:lang="ru">Zheng Z, Wang P, Liu W, Li J, Ye R, Ren D. Distance-IoU loss: Faster and better learning for bounding box regression. AAAI-20 Technical Tracks 7. 2020;34(7):12993–13000. https://doi.org/10.1609/aaai.v34i07.6999</mixed-citation>
     <mixed-citation xml:lang="en">Zheng Z, Wang P, Liu W, Li J, Ye R, Ren D. Distance-IoU loss: Faster and better learning for bounding box regression. AAAI-20 Technical Tracks 7. 2020;34(7):12993–13000. https://doi.org/10.1609/aaai.v34i07.6999</mixed-citation>
    </citation-alternatives>
   </ref>
  </ref-list>
 </back>
</article>
