<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE root>
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" article-type="other" dtd-version="1.2" xml:lang="en"><front><journal-meta><journal-id journal-id-type="publisher-id">Digital Diagnostics</journal-id><journal-title-group><journal-title xml:lang="en">Digital Diagnostics</journal-title><trans-title-group xml:lang="ru"><trans-title>Digital Diagnostics</trans-title></trans-title-group><trans-title-group xml:lang="zh"><trans-title>Digital Diagnostics</trans-title></trans-title-group></journal-title-group><issn publication-format="print">2712-8490</issn><issn publication-format="electronic">2712-8962</issn><publisher><publisher-name xml:lang="en">Eco-Vector</publisher-name></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">693591</article-id><article-id pub-id-type="doi">10.17816/DD693591</article-id><article-categories><subj-group subj-group-type="toc-heading" xml:lang="en"><subject>Original Study Articles</subject></subj-group><subj-group subj-group-type="toc-heading" xml:lang="ru"><subject>Оригинальные исследования</subject></subj-group><subj-group subj-group-type="toc-heading" xml:lang="zh"><subject>原创性科研成果</subject></subj-group><subj-group subj-group-type="article-type"><subject></subject></subj-group></article-categories><title-group><article-title xml:lang="en">Machine learning methods for recognizing surgical site infection in trauma and orthopedic patients</article-title><trans-title-group xml:lang="ru"><trans-title>Сравнение методов машинного обучения для распознавания инфекции области хирургического вмешательства у пациентов травматолого-ортопедического профиля</trans-title></trans-title-group><trans-title-group xml:lang="zh"><trans-title/></trans-title-group></title-group><contrib-group><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-1314-2887</contrib-id><contrib-id contrib-id-type="spin">1402-5186</contrib-id><name-alternatives><name xml:lang="en"><surname>Nazarenko</surname><given-names>Anton G.</given-names></name><name xml:lang="ru"><surname>Назаренко</surname><given-names>Антон Герасимович</given-names></name><name xml:lang="zh"><surname></surname><given-names></given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Corresponding Member of the Russian Academy of Sciences, MD, Dr. Sci. (Medicine), professor of RAS</p></bio><bio xml:lang="ru"><p>член-корреспондент РАН, д-р мед. наук, профессор РАН</p></bio><email>nazarenkoag@cito-priorov.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-8745-6195</contrib-id><contrib-id contrib-id-type="spin">2037-7164</contrib-id><name-alternatives><name xml:lang="en"><surname>Kleimenova</surname><given-names>Elena B.</given-names></name><name xml:lang="ru"><surname>Клеймёнова</surname><given-names>Елена Борисовна</given-names></name><name xml:lang="zh"><surname></surname><given-names></given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>MD, Dr. Sci. (Medicine), рrofessor</p></bio><bio xml:lang="ru"><p>д-р мед. 
наук, профессор</p></bio><email>KleymenovaEB@cito-priorov.ru</email><xref ref-type="aff" rid="aff2"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-0039-943X</contrib-id><contrib-id contrib-id-type="spin">3378-7234</contrib-id><name-alternatives><name xml:lang="en"><surname>Molodchenkov</surname><given-names>Alexey I.</given-names></name><name xml:lang="ru"><surname>Молодченков</surname><given-names>Алексей Игоревич</given-names></name><name xml:lang="zh"><surname></surname><given-names></given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Cand. Sci. (Engineering)</p></bio><bio xml:lang="ru"><p>канд. техн. наук</p></bio><email>aim@isa.ru</email><xref ref-type="aff" rid="aff3"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0001-8938-2321</contrib-id><contrib-id contrib-id-type="spin">7686-2123</contrib-id><name-alternatives><name xml:lang="en"><surname>Gorbatyuk</surname><given-names>Dmitry S.</given-names></name><name xml:lang="ru"><surname>Горбатюк</surname><given-names>Дмитрий Сергеевич</given-names></name><name xml:lang="zh"><surname></surname><given-names></given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>MD, Cand. Sci. (Medicine)</p></bio><bio xml:lang="ru"><p>канд. мед. наук</p></bio><email>gorbatyukds@cito-priorov.ru</email></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0009-0003-2778-0561</contrib-id><name-alternatives><name xml:lang="en"><surname>Enikeev</surname><given-names>Azat D.</given-names></name><name xml:lang="ru"><surname>Еникеев</surname><given-names>Азат Дамирович</given-names></name><name xml:lang="zh"><surname></surname><given-names></given-names></name></name-alternatives><email>azatmag@mail.ru</email></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-0189-3539</contrib-id><contrib-id contrib-id-type="spin">2985-2951</contrib-id><name-alternatives><name xml:lang="en"><surname>Kislyakov</surname><given-names>Valery A.</given-names></name><name xml:lang="ru"><surname>Кисляков</surname><given-names>Валерий Александрович</given-names></name><name xml:lang="zh"><surname></surname><given-names></given-names></name></name-alternatives><bio xml:lang="en"><p>MD, Dr. Sci. (Medicine), professor</p></bio><bio xml:lang="ru"><p>д-р мед. наук, профессор</p></bio><email>vakislakov@mail.ru</email></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-1357-0056</contrib-id><contrib-id contrib-id-type="spin">1910-0484</contrib-id><name-alternatives><name xml:lang="en"><surname>Yashina</surname><given-names>Liubov P.</given-names></name><name xml:lang="ru"><surname>Яшина</surname><given-names>Любовь Петровна</given-names></name><name xml:lang="zh"><surname></surname><given-names></given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Cand. Sci. (Biol.)</p></bio><bio xml:lang="ru"><p>канд. биол. наук</p></bio><email>YashinaLP@cito-priorov.ru</email><xref ref-type="aff" rid="aff8"/></contrib></contrib-group><aff-alternatives id="aff1"><aff><institution xml:lang="en">N.N. 
Priorov National Medical Research Center of Traumatology and Orthopaedics</institution></aff><aff><institution xml:lang="ru">Национальный медицинский исследовательский центр травматологии и ортопедии им. Н.Н. Приорова</institution></aff><aff><institution xml:lang="zh"></institution></aff></aff-alternatives><aff-alternatives id="aff2"><aff><institution xml:lang="en">Priorov National Medical Research Center for Traumatology and Orthopedics</institution></aff><aff><institution xml:lang="ru">Национальный медицинский исследовательский центр травматологии и ортопедии им. Н.Н. Приорова</institution></aff><aff><institution xml:lang="zh"></institution></aff></aff-alternatives><aff-alternatives id="aff3"><aff><institution xml:lang="en">Federal Research Center «Computer Science and Control» of the Russian Academy of Sciences</institution></aff><aff><institution xml:lang="ru">Федеральный исследовательский центр «Информатика и управление» РАН</institution></aff><aff><institution xml:lang="zh"></institution></aff></aff-alternatives><aff-alternatives id="aff4"><aff><institution xml:lang="en">Peoples’ Friendship University of Russia</institution></aff><aff><institution xml:lang="ru">Российский университет дружбы народов</institution></aff><aff><institution xml:lang="zh"></institution></aff></aff-alternatives><aff-alternatives id="aff5"><aff><institution xml:lang="en">Priorov Central Research Institute of Traumatology and Orthopedics</institution></aff><aff><institution xml:lang="ru">Национальный медицинский исследовательский центр травматологии и ортопедии им. Н.Н. Приорова</institution></aff><aff><institution xml:lang="zh"></institution></aff></aff-alternatives><aff-alternatives id="aff6"><aff><institution xml:lang="en">N.N. Priorov National Medical Research Center for Traumatology and Orthopedics</institution></aff><aff><institution xml:lang="ru">ФГБУ "Национальный медицинский исследовательский центр травматологии и ортопедии имени Н.Н. Приорова" МЗ РФ</institution></aff><aff><institution xml:lang="zh"></institution></aff></aff-alternatives><aff-alternatives id="aff7"><aff><institution xml:lang="en">A.K. Yeramishantsev City Clinical Hospital, Peoples’ Friendship University of Russia named after Patrice Lumumba</institution></aff><aff><institution xml:lang="ru">ГБУЗ «Городская клиническая больница им. А.К. Ерамишанцева» ДЗМ,Российский университет дружбы народов имени Патриса Лумумбы</institution></aff><aff><institution xml:lang="zh"></institution></aff></aff-alternatives><aff-alternatives id="aff8"><aff><institution xml:lang="en">Priorov National Medical Research Center for Traumatology and Orthopedics</institution></aff><aff><institution xml:lang="ru">Национальный медицинский исследовательский центр травматологии и ортопедии им. Н.Н. 
Приорова</institution></aff></aff-alternatives><pub-date date-type="preprint" iso-8601-date="2026-02-26" publication-format="electronic"><day>26</day><month>02</month><year>2026</year></pub-date><volume>7</volume><issue>1</issue><issue-title xml:lang="ru"/><history><date date-type="received" iso-8601-date="2025-10-17"><day>17</day><month>10</month><year>2025</year></date><date date-type="accepted" iso-8601-date="2025-12-14"><day>14</day><month>12</month><year>2025</year></date></history><permissions><copyright-statement xml:lang="en">Copyright © Eco-Vector</copyright-statement><copyright-statement xml:lang="ru">Copyright © Эко-вектор</copyright-statement><copyright-statement xml:lang="zh">Copyright © Eco-Vector</copyright-statement><copyright-holder xml:lang="en">Eco-Vector</copyright-holder><copyright-holder xml:lang="ru">Эко-вектор</copyright-holder><copyright-holder xml:lang="zh">Eco-Vector</copyright-holder><ali:free_to_read xmlns:ali="http://www.niso.org/schemas/ali/1.0/"/><license><ali:license_ref xmlns:ali="http://www.niso.org/schemas/ali/1.0/">https://creativecommons.org/licenses/by-nc-nd/4.0</ali:license_ref></license></permissions><self-uri xlink:href="https://jdigitaldiagnostics.com/DD/article/view/693591">https://jdigitaldiagnostics.com/DD/article/view/693591</self-uri><abstract xml:lang="en"><p><bold><italic>BACKGROUND</italic></bold><bold>:</bold><bold><italic> </italic></bold>Surgical site infections (SSIs) are common postoperative complications that often develop after hospital discharge. Timely diagnosis and optimal treatment choice are crucial for clinical success and cost-effectiveness in SSI treatment. Computer vision and artificial intelligence (AI) methods have demonstrated their effectiveness in analyzing chronic wounds, but their applicability to postoperative wound assessment remains poorly understood.</p> <p><bold><italic>AIM</italic></bold><bold>:</bold> To compare the performance metrics of various machine learning methods in classifying surgical wound images by the presence or absence of SSI in trauma and orthopedic patients.</p> <p><bold><italic>METHODS</italic></bold><bold>:</bold> The sample included 512 surgical wound images (292 with SSI and 220 without infection) obtained from 298 patients aged 18 years and older admitted to the N.N. Priorov National Medical Research Center of Traumatology and Orthopedics or the Purulent Surgery Department of the A.K. Eramishantsev City Clinical Hospital. After applying augmentation methods, approximately 2,500 images were obtained. The following machine learning algorithms were used for infection recognition: support vector machines (SVM), linear regression (LR), random forests (RF), convolutional neural networks (VGG16+CNN), and a model with an attention mechanism (YOLO 11s-cls).</p> <p><bold><italic>RESULTS</italic></bold><bold>:</bold> The YOLO 11s-cls model demonstrated the best metrics on the test set: sensitivity 91.2%, accuracy 91%; F1-score 89%; balanced accuracy 90.6%. For the other models, sensitivity ranged from 69.6% (RF) to 87% (VGG16+CNN), accuracy from 68% (RF) to 85% (VGG16+CNN), and F1-score from 66% (RF) to 83% (VGG16+CNN).</p> <p><bold><italic>CONCLUSION</italic>: </bold>The study results confirmed the potential of AI for remote monitoring of surgical wounds in trauma and orthopedic patients. 
The developed models can be used to create a multimodal system for assessing and monitoring wound infection after surgical interventions.</p></abstract><trans-abstract xml:lang="ru"><p><bold>Обоснование</bold>: Инфекции области хирургического вмешательства (ИОХВ) – это распространённые послеоперационные осложнения, которые нередко развиваются после выписки пациента из стационара. Своевременная диагностика и выбор оптимальной тактики лечения — залог клинического успеха и экономической эффективности лечения ИОХВ. Методы компьютерного зрения и искусственного интеллекта (ИИ) продемонстрировали свою эффективность для анализа хронических кожных повреждений, но их применимость для оценки послеоперационных ран остается малоизученной.</p> <p><bold>Цель исследования</bold>: сравнительный анализ точностных метрик различных моделей машинного обучения при решении задачи классификации изображений хирургических ран на наличие и отсутствие ИОХВ у пациентов травматолого-ортопедического профиля.</p> <p><bold>Методы</bold>: Выборка включала 512 изображений хирургических ран (292 с ИОХВ и 220 без инфекции), полученных от 298 пациентов 18 лет и старше, поступивших в стационар ФГБУ «НМИЦ травматологии и ортопедии им. Н.Н. Приорова» Минздрава России или отделение гнойной хирургии ГБУЗ «ГКБ им. А.К. Ерамишанцева» ДЗМ. После применения методов аугментации было получено около 2,5 тысяч изображений. Для распознавания инфекции были использованы модели машинного обучения: метод опорных векторов (SVM), линейной регрессии (LR), случайного леса (RF), свёрточные нейронные сети (VGG16+CNN) и модель с механизмом внимания (YOLO 11s-cls).</p> <p><bold>Результаты:</bold> Лучшие метрики на тестовой выборке имела модель YOLO 11s-cls: чувствительность 91,2%, точность 91%; F1-score 89%. Для остальных моделей чувствительность варьировалась от 69,6% (RF) до 87% (VGG16+CNN), точность — от 68% (RF) до 85% (VGG16+CNN), F1-score — от 66% (RF) до 83% (VGG16+CNN).</p> <p><bold>Заключение</bold>: Результаты исследования подтвердили перспективность использования ИИ для дистанционного мониторинга состояния хирургических ран у пациентов травматолого-ортопедического профиля. Разработанные модели могут быть использованы для создания мультимодальной системы оценки и мониторинга раневой инфекции после хирургических вмешательств.</p></trans-abstract><trans-abstract xml:lang="zh"><p/></trans-abstract><kwd-group xml:lang="en"><kwd>artificial intelligence, machine learning, image recognition, surgical site infection</kwd></kwd-group><kwd-group xml:lang="ru"><kwd>искусственный интеллект, машинное обучение, распознавание изображений, инфекция области хирургического вмешательства</kwd></kwd-group><funding-group><award-group><funding-source><institution-wrap><institution xml:lang="ru">Российский научный фонд</institution></institution-wrap><institution-wrap><institution xml:lang="en">Russian Science Foundation</institution></institution-wrap></funding-source><award-id>грант №24-14-00310</award-id></award-group></funding-group></article-meta></front><body></body><back><ref-list><ref id="B1"><label>1.</label><mixed-citation>Monahan M, Jowett S, Pinkney T, et al. Surgical site infection and costs in low- and middle-income countries: A systematic review of the economic burden. PLoS One. 2020; 15 (6): e0232960. DOI: 10.1371/journal.pone.0232960</mixed-citation></ref><ref id="B2"><label>2.</label><mixed-citation>Woelber E, Schrick EJ, Gessner BD, Evans HL. Proportion of surgical site infections occurring after hospital discharge: a systematic review. 
Surg. Infect. 2016; 17: 510–519. DOI: 10.1089/sur.2015.241</mixed-citation></ref><ref id="B3"><label>3.</label><mixed-citation>Cieza A, Causey K, Kamenov K, et al. Global estimates of the need for rehabilitation based on the Global Burden of Disease study 2019: a systematic analysis for the Global Burden of Disease Study 2019. The Lancet. 2020; 396 (10267): 2006-2017. DOI: 10.1016/S0140-6736(20)32340-0</mixed-citation></ref><ref id="B4"><label>4.</label><mixed-citation>GBD 2021 Osteoarthritis Collaborators. Global, regional, and national burden of osteoarthritis, 1990-2020 and projections to 2050: a systematic analysis for the Global Burden of Disease Study 2021. Lancet Rheumatol. 2023; 5(9): e508-e522. DOI: 10.1016/S2665-9913(23)00163-7</mixed-citation></ref><ref id="B5"><label>5.</label><mixed-citation>Reifs Jiménez D, Casanova-Lozano L, Grau-Carrión S, et al. Artificial intelligence methods for diagnostic and decision-making assistance in chronic wounds: a systematic review. J Med Syst. 2025; 49, 29. DOI: 10.1007/s10916-025-02153-8</mixed-citation></ref><ref id="B6"><label>6.</label><mixed-citation>Ganesan O, Morris MX, Guo L, Orgill D. A review of artificial intelligence in wound care. Art Int. Surg. 2024; 4: 364-75. DOI: 10.20517/ais.2024.68</mixed-citation></ref><ref id="B7"><label>7.</label><mixed-citation>Veredas FJ, Luque-Baena RM, Martín-Santos FJ, et al. Wound image evaluation with machine learning. Neurocomputing. 2015; 164: 112–122. DOI: 10.1016/j.neucom.2014.12.091</mixed-citation></ref><ref id="B8"><label>8.</label><mixed-citation>Curti N, Merli Y, Zengarini C, et al. Effectiveness of semi-supervised active learning in automated wound image segmentation. Int J Mol Sci. 2023; 24, 1–11. DOI: 10.3390/ijms24010706</mixed-citation></ref><ref id="B9"><label>9.</label><mixed-citation>Alzubaidi L, Fadhel MA, Al-Shamma O, et al. Towards a better understanding of transfer learning for medical imaging: A case study. Applied Sci. 2020; 10, 1–21. DOI: 10.3390/app10134523</mixed-citation></ref><ref id="B10"><label>10.</label><mixed-citation>Aldughayfiq B, Ashfaq F, Jhanjhi NZ, et al. YOLO-based deep learning model for pressure ulcer detection and classification. Healthcare. 2023; 11, 1–19. DOI: 10.3390/healthcare11091222</mixed-citation></ref><ref id="B11"><label>11.</label><mixed-citation>Bazargani M, Heidari MJ, Anvari-Fard M, Soltanian-Zadeh H. Unified wound detection and segmentation using YOLO: an efficient approach for accurate wound measurement. IFMBE Proc. 2025; 131: 506–515. DOI: 10.1007/978-3-031-96538-8_43</mixed-citation></ref><ref id="B12"><label>12.</label><mixed-citation>Amin J, Sharif M, Anjum MA, et al. An integrated design for classification and localization of diabetic foot ulcer based on CNN and YOLOv2-DFU models. IEEE Access. 2020; 8, 228586–228597. DOI: 10.1109/ACCESS.2020.3045732</mixed-citation></ref><ref id="B13"><label>13.</label><mixed-citation>TRIPOD+AI statement: updated guidance for reporting clinical prediction models that use regression or machine learning methods. BMJ. 2024; 385: q902. DOI: 10.1136/bmj.q902</mixed-citation></ref><ref id="B14"><label>14.</label><mixed-citation>Prevention of surgical site infections. Moscow: National Association of Specialists in the Control of Infectious and Non-Infectious Diseases (NASCI); 2023. 71 p. (In Russ.) EDN: IVBGFB.</mixed-citation></ref><ref id="B15"><label>15.</label><mixed-citation>Mascarenhas S, Agarwal M. A comparison between VGG16, VGG19 and ResNet50 architecture frameworks for image classification. 2021 Int. 
Conf. on Disruptive Technologies for Multi-Disciplinary Research and Applications (CENTCON). IEEE, 2021; 1: 96-99. DOI: 10.1109/CENTCON52345.2021.9687944</mixed-citation></ref><ref id="B16"><label>16.</label><mixed-citation>Yang H, Ni J, Gao J, et al. A novel method for peanut variety identification and classification by improved VGG16. Sci Rep. 2021; 11: 15756. DOI: 10.1038/s41598-021-95240-y</mixed-citation></ref><ref id="B17"><label>17.</label><mixed-citation>ImageNet [Internet; cited 05.09.2025] Available from: URL:https://www.image-net.org/</mixed-citation></ref><ref id="B18"><label>18.</label><mixed-citation>Chaganti SY, Nanda I, Pandi KR, et al. Image classification using SVM and CNN. 2020 Int. Conf. Computer Sci., Engineering and Applications (ICCSEA). IEEE, 2020: 1-5. DOI: 10.1109/ICCSEA49143.2020.9132851</mixed-citation></ref><ref id="B19"><label>19.</label><mixed-citation>Yang W, Ricanek K, Shen F. Image classification using local linear regression. Neur Comp Appl. 2014; 25 (7): 1913-1920. DOI: 10.1007/s00521-014-1681-2</mixed-citation></ref><ref id="B20"><label>20.</label><mixed-citation>Sheykhmousa M, Mahdianpari M, Ghanbari, H. et al. Support vector machine versus random forest for remote sensing image classification: a meta-analysis and systematic review. IEEE J. 2020; 13: 6308-6325. DOI: 10.1109/JSTARS.2020.3026724</mixed-citation></ref><ref id="B21"><label>21.</label><mixed-citation>Torrey L, Shavlik J. Transfer learning. Handbook of research on machine learning applications and trends: algorithms, methods, and techniques. IGI Global Sci. Publ., 2010: 242-264. DOI: 10.4018/978-1-60566-766-9.ch011</mixed-citation></ref><ref id="B22"><label>22.</label><mixed-citation>Yolo [Internet; cited 05.09.2025] Available from: URL:https://docs.ultralytics.com/ru/tasks/classify/</mixed-citation></ref><ref id="B23"><label>23.</label><mixed-citation>Han K, Wang Y, Chen H, et al. A survey on vision transformer. IEEE Trans. Pattern Anal. Mach. Intell. 2022; 45 (1): 87-110. DOI: 10.1109/TPAMI.2022.3152247</mixed-citation></ref><ref id="B24"><label>24.</label><mixed-citation>Rajalekshmi J, Sharma A, Patil GK, et al. Beyond bandages: customized first aid for different wound types. Medicon Med. Sci. 2024; 6 (3): 28-34. DOI: 10.55162/MCMS.06.199</mixed-citation></ref><ref id="B25"><label>25.</label><mixed-citation>Jacob NV, Sowmya V, Gopalakrishnan EA, et al. Automatic wound detection system for multi-ethnic populations using YOLO. Health Inform. Med. Syst. Biomed. Engin. 2025; 270–281. DOI: 10.1007/978-3-031-85908-3_23</mixed-citation></ref><ref id="B26"><label>26.</label><mixed-citation>Rochon M, Tanner J, Jurkiewicz J, et al. Wound imaging software and digital platform to assist review of surgical wounds using patient smartphones: The development and evaluation of artificial intelligence (WISDOM AI study). PLoS ONE. 2024; 19 (12): e0315384. DOI: 10.1371/journal.pone.0315384</mixed-citation></ref><ref id="B27"><label>27.</label><mixed-citation>Muaddi H, Choudhary A, Lee F, et al. Imaging-based surgical site infection detection using artificial intelligence. Ann Surg. 2025; 282: 419–428. DOI: 10.1097/SLA.0000000000006826</mixed-citation></ref><ref id="B28"><label>28.</label><mixed-citation>Šín P, Hokynková A, Marie N, et al. Machine learning-based pressure ulcer prediction in modular critical care data. Diagnostics. 2022; 12 (4): 850. DOI: 10.3390/diagnostics12040850</mixed-citation></ref><ref id="B29"><label>29.</label><mixed-citation>Wu JM, Tsai CJ, Ho TW, et al. 
A unified framework for automatic detection of wound infection with artificial intelligence. Applied Sci. 2020; 10: 5353. DOI: 10.3390/APP10155353</mixed-citation></ref><ref id="B30"><label>30.</label><mixed-citation>Hsu JT, Chen YW, Ho TW, et al. Chronic wound assessment and infection detection method. BMC Med Inform Decis Mak. 2019; 19 (1): 99. DOI: 10.1186/s12911-019-0813-0</mixed-citation></ref></ref-list></back></article>
