<?xml version="1.0" encoding="UTF-8"?>
<article article-type="research-article" dtd-version="1.3" xml:lang="ru" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="https://metafora.rcsi.science/xsd_files/journal3.xsd">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">moitvivt</journal-id>
      <journal-title-group>
        <journal-title xml:lang="ru">Моделирование, оптимизация и информационные технологии</journal-title>
        <trans-title-group xml:lang="en">
          <trans-title>Modeling, Optimization and Information Technology</trans-title>
        </trans-title-group>
      </journal-title-group>
      <issn pub-type="epub">2310-6018</issn>
      <publisher>
        <publisher-name>Издательство</publisher-name>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="doi">10.26102/2310-6018/2025.50.3.007</article-id>
      <article-id pub-id-type="custom" custom-type="elpub">1955</article-id>
      <title-group>
        <article-title xml:lang="ru">Выбор акустических признаков в задачах обнаружения беспилотных летательных аппаратов</article-title>
        <trans-title-group xml:lang="en">
          <trans-title>Selection of acoustic features in unmanned aerial vehicle detection tasks</trans-title>
        </trans-title-group>
      </title-group>
      <contrib-group>
        <contrib contrib-type="author">
          <contrib-id contrib-id-type="orcid">0000-0002-3577-8838</contrib-id>
          <name-alternatives>
            <name name-style="eastern" xml:lang="ru">
              <surname>Прозоров</surname>
              <given-names>Дмитрий Евгеньевич</given-names>
            </name>
            <name name-style="western" xml:lang="en">
              <surname>Prozorov</surname>
              <given-names>Dmitriy Evgenievich</given-names>
            </name>
          </name-alternatives>
          <email>prozorov.de@gmail.com</email>
          <xref ref-type="aff">aff-1</xref>
        </contrib>
        <contrib contrib-type="author">
          <contrib-id contrib-id-type="orcid">0000-0002-3613-5949</contrib-id>
          <name-alternatives>
            <name name-style="eastern" xml:lang="ru">
              <surname>Бызов</surname>
              <given-names>Виктор Александрович</given-names>
            </name>
            <name name-style="western" xml:lang="en">
              <surname>Byzov</surname>
              <given-names>Viktor Alexandrovich</given-names>
            </name>
          </name-alternatives>
          <email>vbyzov@yandex.ru</email>
          <xref ref-type="aff">aff-2</xref>
        </contrib>
        <contrib contrib-type="author">
          <name-alternatives>
            <name name-style="eastern" xml:lang="ru">
              <surname>Мышкин</surname>
              <given-names>Роман Евгеньевич</given-names>
            </name>
            <name name-style="western" xml:lang="en">
              <surname>Myshkin</surname>
              <given-names>Roman Evgenievich</given-names>
            </name>
          </name-alternatives>
          <email>myshkin.r@niisvt.ru</email>
          <xref ref-type="aff">aff-3</xref>
        </contrib>
      </contrib-group>
      <aff-alternatives id="aff-1">
        <aff xml:lang="ru">Вятский государственный университет</aff>
        <aff xml:lang="en">Vyatka State University</aff>
      </aff-alternatives>
      <aff-alternatives id="aff-2">
        <aff xml:lang="ru">Вятский государственный университет</aff>
        <aff xml:lang="en">Vyatka State University</aff>
      </aff-alternatives>
      <aff-alternatives id="aff-3">
        <aff xml:lang="ru">Научно-исследовательский институт средств вычислительной техники</aff>
        <aff xml:lang="en">Scientific Research Institute of Computer Engineering</aff>
      </aff-alternatives>
      <pub-date pub-type="epub">
        <day>01</day>
        <month>01</month>
        <year>2026</year>
      </pub-date>
      <volume>1</volume>
      <issue>1</issue>
      <elocation-id>10.26102/2310-6018/2025.50.3.007</elocation-id>
      <permissions>
        <copyright-statement>Copyright © Авторы, 2026</copyright-statement>
        <copyright-year>2026</copyright-year>
        <license license-type="creative-commons-attribution" xlink:href="https://creativecommons.org/licenses/by/4.0/">
          <license-p>This work is licensed under a Creative Commons Attribution 4.0 International License</license-p>
        </license>
      </permissions>
      <self-uri xlink:href="https://moitvivt.ru/ru/journal/article?id=1955"/>
      <abstract xml:lang="ru">
        <p>С ростом числа инцидентов, связанных с неправомерным использованием беспилотных летательных аппаратов (БПЛА), повышается актуальность разработки эффективных методов их автоматического обнаружения. В статье выполнен краткий обзор современных подходов к обнаружению БПЛА, особое внимание уделено методам акустического мониторинга, обладающим рядом преимуществ по сравнению с радиочастотными и визуальными системами. Рассмотрены основные акустические признаки, используемые для распознавания звуковых сигналов дронов, а также методы их выделения с помощью открытых библиотек Librosa и Essentia. Для исследования эффективности различных признаков сформирован и использован сбалансированный датасет, включающий аудиозаписи дронов и фоновых шумов. Апробирована методика многоэтапного отбора признаков с применением библиотеки Feature-engine, включающая удаление неизменяющихся (константных) и дублирующихся признаков, корреляционный анализ и оценку значимости признаков. В результате получено подмножество из 53 акустических признаков, обеспечивающее компромисс между качеством обнаружения БПЛА и вычислительными затратами. Описаны математические основы формирования спектральных признаков, в том числе различные типы спектрограмм (мел-, барк- и гамматон-спектрограммы), векторные и скалярные акустические признаки. Полученные результаты могут быть использованы при построении систем автоматического акустического обнаружения БПЛА, базирующихся на методах машинного обучения.</p>
      </abstract>
      <trans-abstract xml:lang="en">
        <p>With the increasing number of incidents involving the unauthorized use of unmanned aerial vehicles (UAVs), the development of effective methods for their automatic detection has become increasingly relevant. This article provides a concise overview of current approaches to UAV detection, with particular emphasis on acoustic monitoring methods, which offer several advantages over radio-frequency and visual systems. The main acoustic features used for recognizing drone sound signals are examined, along with techniques for extracting these features using open-source libraries such as Librosa and Essentia. To evaluate the effectiveness of various features, a balanced dataset was compiled and utilized, containing audio recordings of drones and background noise. A multi-stage feature selection methodology was tested using the Feature-engine library, including the removal of constant and duplicate features, correlation analysis, and feature importance assessment. As a result, a subset of 53 acoustic features was obtained, providing a balance between UAV detection accuracy and computational cost. The mathematical foundations of spectral feature extraction are described, including different types of spectrograms (mel-, bark-, and gammatone-spectrograms), as well as vector and scalar acoustic features. The results presented can be used to develop automatic UAV acoustic detection systems based on machine learning methods.</p>
      </trans-abstract>
      <kwd-group xml:lang="ru">
        <kwd>беспилотный летательный аппарат</kwd>
        <kwd>акустические сигналы</kwd>
        <kwd>акустические признаки</kwd>
        <kwd>спектральный анализ</kwd>
        <kwd>машинное обучение</kwd>
      </kwd-group>
      <kwd-group xml:lang="en">
        <kwd>unmanned aerial vehicle</kwd>
        <kwd>acoustic signals</kwd>
        <kwd>acoustic features</kwd>
        <kwd>spectral analysis</kwd>
        <kwd>machine learning</kwd>
      </kwd-group>
      <funding-group>
        <funding-statement xml:lang="ru">Исследование выполнено без спонсорской поддержки.</funding-statement>
        <funding-statement xml:lang="en">The study was performed without external funding.</funding-statement>
      </funding-group>
    </article-meta>
  </front>
  <back>
    <ref-list>
      <title>References</title>
      <ref id="cit1">
        <label>1</label>
        <mixed-citation xml:lang="en">Seidaliyeva U., Ilipbayeva L., Taissariyeva K., Smailov N., Matson E.T. Advances and Challenges in Drone Detection and Classification Techniques: A State-of-the-Art Review. Sensors. 2023;24(1). https://doi.org/10.3390/s24010125</mixed-citation>
      </ref>
      <ref id="cit2">
        <label>2</label>
        <mixed-citation xml:lang="en">Lee H., Han S., Byeon J.-I., et al. CNN-Based UAV Detection and Classification Using Sensor Fusion. IEEE Access. 2023;11:68791–68808. https://doi.org/10.1109/ACCESS.2023.3293124</mixed-citation>
      </ref>
      <ref id="cit3">
        <label>3</label>
        <mixed-citation xml:lang="en">Tejera-Berengue D., Zhu-Zhou F., Utrilla-Manso M., Gil-Pita R., Rosa-Zurera M. Analysis of Distance and Environmental Impact on UAV Acoustic Detection. Electronics. 2024;13(3). https://doi.org/10.3390/electronics13030643</mixed-citation>
      </ref>
      <ref id="cit4">
        <label>4</label>
        <mixed-citation xml:lang="en">Patel K., Ramirez L., Canales D., Rojas E. Unmanned Aerial Vehicles Detection Using Acoustics and Quantum Signal Processing. In: 2024 AIAA Science and Technology Forum and Exposition, 08–12 January 2024, Orlando, FL, USA. American Institute of Aeronautics and Astronautics; 2024. https://doi.org/10.2514/6.2024-1740</mixed-citation>
      </ref>
      <ref id="cit5">
        <label>5</label>
        <mixed-citation xml:lang="en">Taha B., Shoufan A. Machine Learning-Based Drone Detection and Classification: State-of-the-Art in Research. IEEE Access. 2019;7:138669–138682. https://doi.org/10.1109/ACCESS.2019.2942944</mixed-citation>
      </ref>
      <ref id="cit6">
        <label>6</label>
        <mixed-citation xml:lang="en">Najafi Ja., Mirzakuchaki S., Shamaghdari S. Autonomous Drone Detection and Classification Using Computer Vision and Prony Algorithm-Based Frequency Feature Extraction. Journal of Intelligent &amp; Robotic Systems. 2025;111(1). https://doi.org/10.1007/s10846-024-02216-x</mixed-citation>
      </ref>
      <ref id="cit7">
        <label>7</label>
        <mixed-citation xml:lang="en">Zhang Yi.D., Xiang X., Li Yi, Chen G. Enhanced Micro-Doppler Feature Analysis for Drone Detection. In: 2021 IEEE Radar Conference (RadarConf21), 07–14 May 2021, Atlanta, GA, USA. IEEE; 2021. P. 1–4. https://doi.org/10.1109/RadarConf2147009.2021.9455228</mixed-citation>
      </ref>
      <ref id="cit8">
        <label>8</label>
        <mixed-citation xml:lang="en">Souli N., Theodorou I., Kolios P., Ellinas G. Detection and Tracking of Rogue UASs Using a Novel Real-Time Passive Radar System. In: 2022 International Conference on Unmanned Aircraft Systems (ICUAS), 21–24 June 2022, Dubrovnik, Croatia. IEEE; 2022. P. 576–582. https://doi.org/10.1109/ICUAS54217.2022.9836054</mixed-citation>
      </ref>
      <ref id="cit9">
        <label>9</label>
        <mixed-citation xml:lang="en">McCoy J., Rawat D.B. Optimized Machine Learning Based Multimodal UAV Detection Using Ensemble Stacking. In: 2024 IEEE 6th International Conference on Cognitive Machine Intelligence (CogMI), 28–31 October 2024, Washington, DC, USA. IEEE; 2024. P. 40–49. https://doi.org/10.1109/CogMI62246.2024.00016</mixed-citation>
      </ref>
      <ref id="cit10">
        <label>10</label>
        <mixed-citation xml:lang="en">Zahid Rao A., Shahid Siddique S., Danish Mujib M., Abul Hasan M., Alokaily A.O., Tahira T. Sensor Fusion and Machine Learning for Seated Movement Detection with Trunk Orthosis. IEEE Access. 2024;12:41676–41687. https://doi.org/10.1109/ACCESS.2024.3377111</mixed-citation>
      </ref>
      <ref id="cit11">
        <label>11</label>
        <mixed-citation xml:lang="en">Wang Ye, Chen Yu., Choi J., Kuo C.-C.J. Towards Visible and Thermal Drone Monitoring with Convolutional Neural Networks. APSIPA Transactions on Signal and Information Processing. 2019;8(1). https://doi.org/10.1017/ATSIP.2018.30</mixed-citation>
      </ref>
      <ref id="cit12">
        <label>12</label>
        <mixed-citation xml:lang="en">Guo Ju., Ahmad I., Chang K. Classification, Positioning, and Tracking of Drones by HMM Using Acoustic Circular Microphone Array Beamforming. EURASIP Journal on Wireless Communications and Networking. 2020;2020(1). https://doi.org/10.1186/s13638-019-1632-9</mixed-citation>
      </ref>
      <ref id="cit13">
        <label>13</label>
        <mixed-citation xml:lang="en">Diao Yu., Zhang Yi., Zhao G., Khamis M. Drone Authentication via Acoustic Fingerprint. In: ACSAC '22: Proceedings of the 38th Annual Computer Security Applications Conference, 05–09 December 2022, Austin, TX, USA. New York: Association for Computing Machinery; 2022. P. 658–668. https://doi.org/10.1145/3564625.3564653</mixed-citation>
      </ref>
      <ref id="cit14">
        <label>14</label>
        <mixed-citation xml:lang="en">Deleforge A., Carlo D.D., Strauss M., Serizel R., Marcenaro L. Audio-Based Search and Rescue with a Drone: Highlights from the IEEE Signal Processing Cup 2019 Student Competition. IEEE Signal Processing Magazine. 2019;36(5):138–144. https://doi.org/10.1109/MSP.2019.2924687</mixed-citation>
      </ref>
      <ref id="cit15">
        <label>15</label>
        <mixed-citation xml:lang="en">Marple S.L., Jr. Digital Spectral Analysis. Mineola, New York: Dover Publications; 2019. 432 p.</mixed-citation>
      </ref>
      <ref id="cit16">
        <label>16</label>
        <mixed-citation xml:lang="en">Haykin S., Liu K.J.R. Handbook on Array Processing and Sensor Networks. Hoboken: John Wiley &amp; Sons; 2009. 924 p.</mixed-citation>
      </ref>
      <ref id="cit17">
        <label>17</label>
        <mixed-citation xml:lang="en">Flanagan J.L. Speech Analysis Synthesis and Perception. Berlin, Heidelberg: Springer; 1972. 446 p. https://doi.org/10.1007/978-3-662-01562-9</mixed-citation>
      </ref>
      <ref id="cit18">
        <label>18</label>
        <mixed-citation xml:lang="en">O’Shaughnessy D. Speech Communication: Human and Machine. Reading: Addison-Wesley; 1990. 548 p.</mixed-citation>
      </ref>
      <ref id="cit19">
        <label>19</label>
        <mixed-citation xml:lang="en">Traunmüller H. Analytical Expressions for the Tonotopic Sensory Scale. The Journal of the Acoustical Society of America. 1990;88(1):97–100.</mixed-citation>
      </ref>
      <ref id="cit20">
        <label>20</label>
        <mixed-citation xml:lang="en">Van Gisbergen J.A.M., Grashuis J.L., Johannesma P.I.M., Vendrik A.J.H. Neurons in the Cochlear Nucleus Investigated with Tone and Noise Stimuli. Experimental Brain Research. 1975;23(4):387–406. https://doi.org/10.1007/BF00238022</mixed-citation>
      </ref>
      <ref id="cit21">
        <label>21</label>
        <mixed-citation xml:lang="en">Davis S., Mermelstein P. Comparison of Parametric Representations for Monosyllabic Word Recognition in Continuously Spoken Sentences. IEEE Transactions on Acoustics, Speech, and Signal Processing. 1980;28(4):357–366. https://doi.org/10.1109/TASSP.1980.1163420</mixed-citation>
      </ref>
      <ref id="cit22">
        <label>22</label>
        <mixed-citation xml:lang="en">Xu M., Duan L.-Yu, Cai J., Chia L.-T., Xu Ch., Tian Q. HMM-Based Audio Keyword Generation. In: Advances in Multimedia Information Processing – PCM 2004: 5th Pacific Rim Conference on Multimedia: Proceedings: Part III, 30 November – 03 December 2004, Tokyo, Japan. Berlin, Heidelberg: Springer; 2004. P. 566–574. https://doi.org/10.1007/978-3-540-30543-9_71</mixed-citation>
      </ref>
      <ref id="cit23">
        <label>23</label>
        <mixed-citation xml:lang="en">Qi J., Wang D., Xu J., Tejedor J. Bottleneck Features Based on Gammatone Frequency Cepstral Coefficients. In: INTERSPEECH 2013: 14th Annual Conference of the International Speech Communication Association, 25–29 August 2013, Lyon, France. ISCA; 2013. P. 1751–1755. https://doi.org/10.21437/Interspeech.2013-435</mixed-citation>
      </ref>
      <ref id="cit24">
        <label>24</label>
        <mixed-citation xml:lang="en">Bartsch M.A., Wakefield G.H. Audio Thumbnailing of Popular Music Using Chroma-Based Representations. IEEE Transactions on Multimedia. 2005;7(1):96–104. https://doi.org/10.1109/TMM.2004.840597</mixed-citation>
      </ref>
      <ref id="cit25">
        <label>25</label>
        <mixed-citation xml:lang="en">Müller M., Kurth F., Clausen M. Audio Matching via Chroma-Based Statistical Features. In: ISMIR 2005: 6th International Conference on Music Information Retrieval: Proceedings, 11–15 September 2005, London, UK. 2005. P. 288–295. https://doi.org/10.5281/zenodo.1416799</mixed-citation>
      </ref>
      <ref id="cit26">
        <label>26</label>
        <mixed-citation xml:lang="en">Jiang D.-N., Lu L., Zhang H.-J., Tao J.-H., Cai L.-H. Music Type Classification by Spectral Contrast Feature. In: IEEE International Conference on Multimedia and Expo: Proceedings, 26–29 August 2002, Lausanne, Switzerland. IEEE; 2002. P. 113–116. https://doi.org/10.1109/ICME.2002.1035731</mixed-citation>
      </ref>
      <ref id="cit27">
        <label>27</label>
        <mixed-citation xml:lang="en">De Cheveigné A., Kawahara H. YIN, a Fundamental Frequency Estimator for Speech and Music. The Journal of the Acoustical Society of America. 2002;111(4):1917–1930.</mixed-citation>
      </ref>
      <ref id="cit28">
        <label>28</label>
        <mixed-citation xml:lang="en">Klapuri A. Qualitative and Quantitative Aspects in the Design of Periodicity Estimation Algorithms. In: 2000 10th European Signal Processing Conference, 04–08 September 2000, Tampere, Finland. IEEE; 2000. P. 1–4.</mixed-citation>
      </ref>
    </ref-list>
    <fn-group>
      <fn fn-type="conflict">
        <p>The authors declare that there are no conflicts of interest present.</p>
      </fn>
    </fn-group>
  </back>
</article>