<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article>
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" article-type="other" dtd-version="1.2" xml:lang="en"><front><journal-meta><journal-id journal-id-type="publisher-id">Cancer Urology</journal-id><journal-title-group><journal-title xml:lang="en">Cancer Urology</journal-title><trans-title-group xml:lang="ru"><trans-title>Онкоурология</trans-title></trans-title-group></journal-title-group><issn publication-format="print">1726-9776</issn><issn publication-format="electronic">1996-1812</issn><publisher><publisher-name xml:lang="en">Publishing House ABV Press</publisher-name></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">1864</article-id><article-id pub-id-type="doi">10.17650/1726-9776-2024-20-4-15-23</article-id><article-categories><subj-group subj-group-type="toc-heading" xml:lang="en"><subject>DIAGNOSIS AND TREATMENT OF URINARY SYSTEM TUMORS. PROSTATE CANCER</subject></subj-group><subj-group subj-group-type="toc-heading" xml:lang="ru"><subject>ДИАГНОСТИКА И ЛЕЧЕНИЕ ОПУХОЛЕЙ МОЧЕПОЛОВОЙ СИСТЕМЫ. 
Рак предстательной железы</subject></subj-group><subj-group subj-group-type="article-type"><subject></subject></subj-group></article-categories><title-group><article-title xml:lang="en">Development of a deep learning-based system for aiding in the determination of Prostate Imaging Reporting and Data System (PI-RADS) scores: an international multicenter study</article-title><trans-title-group xml:lang="ru"><trans-title>Разработка системы на базе глубокого обучения для помощи в принятии врачебных решений в определении оценок по системе PI-RADS: международное многоцентровое исследование</trans-title></trans-title-group></title-group><contrib-group><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-0601-4713</contrib-id><name><surname>He</surname><given-names>Mingze</given-names></name><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Build. 1, 2 Bol’shaya Pirogovskaya St., Moscow 119435</p></bio><bio xml:lang="ru"><p>119435 Москва, ул. Большая Пироговская, 2, стр. 1</p></bio><email>hemingze97@gmail.com</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-3007-1315</contrib-id><name-alternatives><name xml:lang="en"><surname>Enikeev</surname><given-names>M. E.</given-names></name><name xml:lang="ru"><surname>Еникеев</surname><given-names>М. Э.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Build. 1, 2 Bol’shaya Pirogovskaya St., Moscow 119435</p></bio><bio xml:lang="ru"><p>Еникеев Михаил Эликович.</p><p>119435 Москва, ул. Большая Пироговская, 2, стр. 
1</p></bio><email>enikmic@mail.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-6005-6247</contrib-id><name-alternatives><name xml:lang="en"><surname>Rzaev</surname><given-names>R. T.</given-names></name><name xml:lang="ru"><surname>Рзаев</surname><given-names>Р. Т.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Build. 1, 6 Bol’shaya Pirogovskaya St., Moscow 119435</p></bio><bio xml:lang="ru"><p>Рзаев Рамин Теймурхан оглы.</p><p>119435 Москва, ул. Большая Пироговская, 6, стр. 1</p></bio><email>ramin-rz@mail.ru</email><xref ref-type="aff" rid="aff2"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0001-5968-9883</contrib-id><name-alternatives><name xml:lang="en"><surname>Chernenkiy</surname><given-names>I. M.</given-names></name><name xml:lang="ru"><surname>Черненький</surname><given-names>И. М.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Build. 1, 2 Bol’shaya Pirogovskaya St., Moscow 119435</p></bio><bio xml:lang="ru"><p>Черненький Иван Михайлович.</p><p>119435 Москва, ул. Большая Пироговская, 2, стр. 1</p></bio><email>chernenkiy_i_m@staff.sechenov.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0001-6808-7489</contrib-id><name-alternatives><name xml:lang="en"><surname>Feldsherov</surname><given-names>M. V.</given-names></name><name xml:lang="ru"><surname>Фельдшеров</surname><given-names>М. В.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Build. 
1, 6 Bol’shaya Pirogovskaya St., Moscow 119435</p></bio><bio xml:lang="ru"><p>Фельдшеров Михаил Викторович.</p><p>119435 Москва, ул. Большая Пироговская, 6, стр. 1</p></bio><email>feldsherov_m_v@staff.sechenov.ru</email><xref ref-type="aff" rid="aff2"/></contrib><contrib contrib-type="author"><name><surname>Li</surname><given-names>He</given-names></name><address><country country="CN">China</country></address><bio xml:lang="en"><p>Changchun</p></bio><bio xml:lang="ru"><p>Чанчунь</p></bio><email>lihe2018@jlu.edu.cn</email><xref ref-type="aff" rid="aff3"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-2860-276X</contrib-id><name><surname>Hu</surname><given-names>Kebang</given-names></name><address><country country="CN">China</country></address><bio xml:lang="en"><p>Changchun</p></bio><bio xml:lang="ru"><p>Чанчунь</p></bio><email>hukb@jlu.edu.cn</email><xref ref-type="aff" rid="aff4"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-1121-9430</contrib-id><name-alternatives><name xml:lang="en"><surname>Shpot</surname><given-names>E. V.</given-names></name><name xml:lang="ru"><surname>Шпоть</surname><given-names>Е. В.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Build. 1, 2 Bol’shaya Pirogovskaya St., Moscow 119435</p></bio><bio xml:lang="ru"><p>Шпоть Евгений Валерьевич.</p><p>119435 Москва, ул. Большая Пироговская, 2, стр. 1</p></bio><email>shpot_e_v@staff.sechenov.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0001-7787-1240</contrib-id><name-alternatives><name xml:lang="en"><surname>Rapoport</surname><given-names>L. M.</given-names></name><name xml:lang="ru"><surname>Рапопорт</surname><given-names>Л. 
М.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Build. 1, 2 Bol’shaya Pirogovskaya St., Moscow 119435</p></bio><bio xml:lang="ru"><p>Рапопорт Леонид Михайлович.</p><p>119435 Москва, ул. Большая Пироговская, 2, стр. 1</p></bio><email>leonidrapoport@yandex.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-5541-2251</contrib-id><name-alternatives><name xml:lang="en"><surname>Glybochko</surname><given-names>P. V.</given-names></name><name xml:lang="ru"><surname>Глыбочко</surname><given-names>П. В.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Build. 1, 2 Bol’shaya Pirogovskaya St., Moscow 119435</p></bio><bio xml:lang="ru"><p>Глыбочко Петр Витальевич.</p><p>119435 Москва, ул. Большая Пироговская, 2, стр. 1</p></bio><email>rector@staff.sechenov.ru</email><xref ref-type="aff" rid="aff1"/></contrib></contrib-group><aff-alternatives id="aff1"><aff><institution xml:lang="en">Institute for Urology and Reproductive Health, I.M. Sechenov First Moscow State Medical University, Ministry of Health of Russia (Sechenov University)</institution></aff><aff><institution xml:lang="ru">Институт урологии и репродуктивного здоровья человека ФГАОУ ВО Первый Московский государственный медицинский университет им. И.М. Сеченова Минздрава России (Сеченовский Университет)</institution></aff></aff-alternatives><aff-alternatives id="aff2"><aff><institution xml:lang="en">Department of Radiology, The Second University Hospital, I.M. Sechenov First Moscow State Medical University, Ministry of Health of Russia (Sechenov University)</institution></aff><aff><institution xml:lang="ru">Отделение лучевой диагностики Университетской клинической больницы № 2 ФГАОУ ВО Первый Московский государственный медицинский университет им. И.М. 
Сеченова Минздрава России (Сеченовский Университет)</institution></aff></aff-alternatives><aff-alternatives id="aff3"><aff><institution xml:lang="en">Department of Radiology, The First Hospital of Jilin University</institution></aff><aff><institution xml:lang="ru">Отделение лучевой диагностики, Первая больница Цзилиньского университета</institution></aff></aff-alternatives><aff-alternatives id="aff4"><aff><institution xml:lang="en">Department of Urology, The First Hospital of Jilin University</institution></aff><aff><institution xml:lang="ru">Отделение урологии, Первая больница Цзилиньского университета</institution></aff></aff-alternatives><pub-date date-type="pub" iso-8601-date="2024-12-01" publication-format="electronic"><day>01</day><month>12</month><year>2024</year></pub-date><volume>20</volume><issue>4</issue><issue-title xml:lang="en"/><issue-title xml:lang="ru"/><fpage>15</fpage><lpage>23</lpage><history><date date-type="received" iso-8601-date="2024-10-30"><day>30</day><month>10</month><year>2024</year></date><date date-type="accepted" iso-8601-date="2025-01-21"><day>21</day><month>01</month><year>2025</year></date></history><permissions><copyright-year>2024</copyright-year><ali:free_to_read xmlns:ali="http://www.niso.org/schemas/ali/1.0/"/></permissions><self-uri xlink:href="https://oncourology.abvpress.ru/oncur/article/view/1864">https://oncourology.abvpress.ru/oncur/article/view/1864</self-uri><abstract xml:lang="en"><p><bold>Background</bold>. Prostate multiparametric magnetic resonance imaging is widely recommended prior to biopsy in clinical practice, with the Prostate Imaging Reporting and Data System (PI-RADS) as the standard tool for guiding diagnosis and treatment decisions. However, analyzing multiparametric magnetic resonance imaging data demands substantial expertise, and the process is often time-intensive and cognitively challenging, leading to variability between and within readers.</p><p><bold>Aim</bold>. 
To create a deep learning-based computer-aided diagnosis (DL-CAD) system to minimize manual influence on PI-RADS score determination.</p><p><bold>Materials and methods</bold>. Between January 2020 and May 2024, 108 patients with histopathologically confirmed prostate cancer with PI-RADS scores 4–5 were retrospectively selected for model development and training. Additionally, 28 benign cases were included for model validation. Different prostate zones were labeled following PI-RADS v2.1 guidelines to facilitate model selection. Manual segmentation of prostate regions and lesions was performed on T2-weighted (T2W) sequences, and a 3D U-Net architecture was implemented for the DL model using the MONAI framework. Diagnostic performance was assessed using Python-based statistical analysis.</p><p><bold>Results</bold>. The DL-CAD system achieved average accuracy of 78 %, sensitivity of 60 %, and specificity of 84 % for lesion detection. The Dice similarity coefficient for prostate segmentation was 0.71, and the AUROC was 81.16 %.</p><p><bold>Conclusion</bold>. The DL-CAD system demonstrates promise for patients with clinically significant prostate cancer by improving diagnostic accuracy. While it exhibits high specificity, further improvements of sensitivity and segmentation accuracy are necessary. These improvements could be achieved through the use of larger datasets and advanced deep learning techniques, such as transfer learning or ensemble learning, which could enhance sensitivity without compromising specificity. Further multicenter validation is required to accelerate the integration of this system into clinical practice.</p></abstract><trans-abstract xml:lang="ru"><p><bold>Введение</bold>. Проведение мультипараметрической магнитно-резонансной томографии предстательной железы широко рекомендуется в клинической практике перед выполнением биопсии предстательной железы. 
Система отчетности данных магнитно-резонансной томографии предстательной железы (Prostate Imaging Reporting and Data System, PI-RADS) является стандартным инструментом для диагностики и принятия врачебных решений. Однако точный анализ данных мультипараметрической магнитно-резонансной томографии требует высокой экспертности, и трудоемкий, когнитивно насыщенный процесс часто снижает согласованность оценок между разными специалистами и у одного и того же специалиста.</p><p><bold>Цель исследования </bold>– разработка системы компьютерной диагностики на базе глубокого обучения (DL-CAD) для минимизации влияния ручной сегментации на определение оценок PI-RADS.</p><p><bold>Материалы и методы</bold>. С января 2020 г. по май 2024 г. ретроспективно отобраны 108 пациентов с гистологически подтвержденным раком предстательной железы с оценками PI-RADS 4–5 для разработки модели и тренировки. Для валидации модели были включены 28 доброкачественных случаев. Различные зоны предстательной железы были помечены в соответствии с руководствами PI-RADS v2.1 для облегчения выбора модели. Ручная сегментация областей предстательной железы и поражений выполнена на T2-взвешенных последовательностях, и была реализована архитектура 3D U-Net для модели глубокого обучения с использованием фреймворка MONAI. Диагностическая эффективность оценивалась с помощью статистического анализа на Python.</p><p><bold>Результаты</bold>. Система DL-CAD показала среднюю точность 78 %, чувствительность 60 % и специфичность 84 % при обнаружении поражений. Коэффициент сходства Dice для сегментации предстательной железы составил 0,71, а площадь под ROC-кривой (AUROC) – 81,16 %.</p><p><bold>Заключение</bold>. Система DL-CAD демонстрирует перспективы для пациентов с клинически значимым раком предстательной железы за счет повышения диагностической точности. Несмотря на высокую специфичность, необходимы дальнейшие улучшения в чувствительности и точности сегментации. 
Эти улучшения могут быть достигнуты за счет использования более крупных наборов данных и передовых техник глубокого обучения, таких как трансферное обучение или ансамблевое обучение, которые могут повысить чувствительность без ущерба для специфичности. Требуется дальнейшая многоцентровая валидация для ускорения интеграции данной системы в клиническую практику.</p></trans-abstract><kwd-group xml:lang="en"><kwd>prostate cancer</kwd><kwd>multiparametric magnetic resonance imaging</kwd><kwd>artificial intelligence</kwd><kwd>deep learning</kwd><kwd>PI-RADS</kwd></kwd-group><kwd-group xml:lang="ru"><kwd>рак предстательной железы</kwd><kwd>мультипараметрическая магнитно-резонансная томография</kwd><kwd>искусственный интеллект</kwd><kwd>глубокое обучение</kwd><kwd>PI-RADS</kwd></kwd-group><funding-group/></article-meta></front><body></body><back><ref-list><ref id="B1"><label>1.</label><mixed-citation>Bray F., Ferlay J., Soerjomataram I. et al. Global cancer statistics 2018: GLOBOCAN estimates of incidence and mortality worldwide for 36 cancers in 185 countries. CA Cancer J Clin 2018;68(6):394–424. DOI: 10.3322/caac.21492</mixed-citation></ref><ref id="B2"><label>2.</label><mixed-citation>Bagheri H., Mahdavi S.R., Geramifar P. et al. An update on the role of mpMRI and (68)Ga-PSMA PET imaging in primary and recurrent prostate cancer. Clin Genitourin Cancer 2024;22(3):102076. DOI: 10.1016/j.clgc.2024.102076</mixed-citation></ref><ref id="B3"><label>3.</label><mixed-citation>Kaneko M., Sugano D., Lebastchi A.H. et al. Techniques and outcomes of MRI-TRUS fusion prostate biopsy. Curr Urol Rep 2021;22(4):27. DOI: 10.1007/s11934-021-01037-x</mixed-citation></ref><ref id="B4"><label>4.</label><mixed-citation>Weinreb J.C., Barentsz J.O., Choyke P.L. et al. PI-RADS prostate imaging – reporting and data system: 2015, Version 2. Eur Urol 2016;69(1):16–40. 
DOI: 10.1016/j.eururo.2015.08.052</mixed-citation></ref><ref id="B5"><label>5.</label><mixed-citation>Ahdoot M., Lebastchi A.H., Long L. et al. Using Prostate Imaging-Reporting and Data System (PI-RADS) scores to select an optimal prostate biopsy method: a secondary analysis of the Trio study. Eur Urol Oncol 2022;5(2):176–86. DOI: 10.1016/j.euo.2021.03.004</mixed-citation></ref><ref id="B6"><label>6.</label><mixed-citation>Padhani A.R., Barentsz J., Villeirs G. et al. PI-RADS Steering Committee: the PI-RADS multiparametric MRI and MRI-directed biopsy pathway. Radiology 2019;292(2):464–74. DOI: 10.1148/radiol.2019182946</mixed-citation></ref><ref id="B7"><label>7.</label><mixed-citation>Wen J., Liu W., Shen X., Hu W. PI-RADS v2.1 and PSAD for the prediction of clinically significant prostate cancer among patients with PSA levels of 4-10 ng/ml. Sci Rep 2024;14(1):6570. DOI: 10.1038/s41598-024-57337-y</mixed-citation></ref><ref id="B8"><label>8.</label><mixed-citation>He M., Cao Y., Chi C. et al. Research progress on deep learning in magnetic resonance imaging-based diagnosis and treatment of prostate cancer: a review on the current status and perspectives. Front Oncol 2023;13:1189370. DOI: 10.3389/fonc.2023.1189370</mixed-citation></ref><ref id="B9"><label>9.</label><mixed-citation>Smani S., Jalfon M., Sundaresan V. et al. Inter-reader reliability and diagnostic accuracy of PI-RADS scoring between academic and community care networks: how wide is the gap? Urol Oncol 2024;S1078-1439(24)00681-1. DOI: 10.1016/j.urolonc.2024.10.002</mixed-citation></ref><ref id="B10"><label>10.</label><mixed-citation>Savadjiev P., Chong J., Dohan A. et al. Demystification of AI-driven medical image interpretation: past, present and future. Eur Radiol 2019;29(3):1616–24. DOI: 10.1007/s00330-018-5674-x</mixed-citation></ref><ref id="B11"><label>11.</label><mixed-citation>Rouvière O., Jaouen T., Baseilhac P. et al. 
Artificial intelligence algorithms aimed at characterizing or detecting prostate cancer on MRI: How accurate are they when tested on independent cohorts? A systematic review. Diagn Interv Imaging 2023;104(5):221–34. DOI: 10.1016/j.diii.2022.11.005</mixed-citation></ref><ref id="B12"><label>12.</label><mixed-citation>Taye M.M. Understanding of machine learning with deep learning: architectures, workflow, applications and future directions. Computers 2023;12(5):91.</mixed-citation></ref><ref id="B13"><label>13.</label><mixed-citation>Alzubaidi L., Bai J., Al-Sabaawi A. et al. A survey on deep learning tools dealing with data scarcity: definitions, challenges, solutions, tips, and applications. J Big Data 2023;10(1):46. DOI: 10.1186/s40537-023-00727-2</mixed-citation></ref><ref id="B14"><label>14.</label><mixed-citation>Singh D., Kumar V., Das C.J. et al. Machine learning-based analysis of a semi-automated PI-RADS v2.1 scoring for prostate cancer. Front Oncol 2022;12:961985. DOI: 10.3389/fonc.2022.961985</mixed-citation></ref><ref id="B15"><label>15.</label><mixed-citation>Annamalai A., Fustok J.N., Beltran-Perez J. et al. Interobserver agreement and accuracy in interpreting mpMRI of the prostate: a systematic review. Curr Urol Rep 2022;23(1):1–10. DOI: 10.1007/s11934-022-01084-y</mixed-citation></ref><ref id="B16"><label>16.</label><mixed-citation>Min X., Li M., Dong D. et al. Multi-parametric MRI-based radiomics signature for discriminating between clinically significant and insignificant prostate cancer: cross-validation of a machine learning method. Eur J Radiol 2019;115:16–21. DOI: 10.1016/j.ejrad.2019.03.010</mixed-citation></ref><ref id="B17"><label>17.</label><mixed-citation>Liu Y., Zheng H., Liang Z. et al. Textured-based deep learning in prostate cancer classification with 3T multiparametric MRI: comparison with PI-RADS-based classification. Diagnostics (Basel) 2021;11(10):1785. 
DOI: 10.3390/diagnostics11101785</mixed-citation></ref><ref id="B18"><label>18.</label><mixed-citation>Aldoj N., Lukas S., Dewey M., Penzkofer T. Semi-automatic classification of prostate cancer on multi-parametric MR imaging using a multi-channel 3D convolutional neural network. Eur Radiol 2020;30(2):1243–53. DOI: 10.1007/s00330-019-06417-z</mixed-citation></ref><ref id="B19"><label>19.</label><mixed-citation>Saha A., Bosma J.S., Twilt J.J. et al. Artificial intelligence and radiologists in prostate cancer detection on MRI (PI-CAI): an international, paired, non-inferiority, confirmatory study. Lancet Oncol 2024;25(7):879–87. DOI: 10.1016/s1470-2045(24)00220-1</mixed-citation></ref><ref id="B20"><label>20.</label><mixed-citation>Cao R., Mohammadian Bajgiran A., Afshari Mirak S. et al. Joint prostate cancer detection and Gleason score prediction in mp-MRI via FocalNet. IEEE Trans Med Imaging 2019;38(11):2496–506. DOI: 10.1109/tmi.2019.2901928</mixed-citation></ref><ref id="B21"><label>21.</label><mixed-citation>Hoar D., Lee P.Q., Guida A. et al. Combined transfer learning and test-time augmentation improves convolutional neural network-based semantic segmentation of prostate cancer from multi-parametric MR images. Comput Methods Programs Biomed 2021;210:106375. DOI: 10.1016/j.cmpb.2021.106375</mixed-citation></ref></ref-list></back></article>
