

de Recherche et d’Innovation
en Cybersécurité et Société
Abdollahzadeh, S.; Allili, M. S.; Boulmerka, A.; Lapointe, J.-F.
A Vision-Based Framework for Safe Landing Zone Mapping of UAVs in Dynamic Environments Article de journal
Dans: IEEE Open Journal of the Computer Society, vol. 7, p. 492–503, 2026, ISSN: 26441268 (ISSN).
Résumé | Liens | BibTeX | Étiquettes: Aerial vehicle, Air navigation, Aircraft detection, Aircraft landing, Antennas, automatic UAV navigation, Computer vision, Dynamic environments, Forecasting, Homographies, Landing zones, Learning systems, Motion tracking, Object detection, Object recognition, Object Tracking, object trajectory prediction, Robotics, Safe landing, Safe landing zone, safe landing zones (SLZ), Semantic segmentation, Semantics, Trajectories, Trajectory forecasting, Uncrewed aerial vehicles (UAVs), Unmanned aerial vehicle, Unmanned aerial vehicles (UAV)
@article{abdollahzadeh_vision-based_2026,
  title     = {A Vision-Based Framework for Safe Landing Zone Mapping of {UAVs} in Dynamic Environments},
  author    = {Abdollahzadeh, S. and Allili, M. S. and Boulmerka, A. and Lapointe, J.-F.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105029942397&doi=10.1109%2FOJCS.2026.3663268&partnerID=40&md5=b11484e035458c84b1d3f6780b92c91c},
  doi       = {10.1109/OJCS.2026.3663268},
  issn      = {2644-1268},
  year      = {2026},
  date      = {2026-01-01},
  journal   = {IEEE Open Journal of the Computer Society},
  volume    = {7},
  pages     = {492--503},
  abstract  = {Identification safe landing zones (SLZ) for Uncrewed Aerial Vehicles (UAVs) is important to ensure reliable and safe navigation, especially when they are operated in complex and safety-critical environments. However, this is a challenging task due to obstacles and UAV motion. This paper proposes a vision-based framework that maps SLZs in dynamic scenes by integrating several functionalities for analyzing visually static and dynamic aspects of a scene. Static analysis is achieved through context-aware segmentation which divides the image into thematic classes enabling to identify suitable landing surfaces (e.g., roads, grass). For dynamic content analysis, we combine object detection, tracking, and trajectory prediction to determine object occupancy and identify regions free of obstacles. Trajectory prediction is performed through a novel encoder–decoder architecture taking past object positions to predict the most likely future locations. To ensure stable and robust trajectory prediction, we introduce an optimized homography computation using multi-scale image analysis and cumulative updates to compensate UAV motion. We tested our framework on different operational scenarios, including urban and natural scenes with moving objects like vehicles and pedestrians. Obtained results demonstrate its strong performance, and its significant potential for enabling autonomous and safe UAV navigation. © 2020 IEEE.},
  keywords  = {Aerial vehicle, Air navigation, Aircraft detection, Aircraft landing, Antennas, automatic UAV navigation, Computer vision, Dynamic environments, Forecasting, Homographies, Landing zones, Learning systems, Motion tracking, Object detection, Object recognition, Object Tracking, object trajectory prediction, Robotics, Safe landing, Safe landing zone, safe landing zones (SLZ), Semantic segmentation, Semantics, Trajectories, Trajectory forecasting, Uncrewed aerial vehicles (UAVs), Unmanned aerial vehicle, Unmanned aerial vehicles (UAV)},
  pubstate  = {published},
  tppubtype = {article}
}
Abdollahzadeh, S.; Allili, M. S.; Boulmerka, A.; Lapointe, J.-F.
Visual Safety Mapping for UAV Landings Using Ordinal Regression Networks Article de journal
Dans: IEEE Transactions on Artificial Intelligence, 2025, ISSN: 2691-4581.
Résumé | Liens | BibTeX | Étiquettes: automatic UAV navigation, deep ordinal regression, safe landing zones (SLZ), Semantic segmentation
@article{abdollahzadeh_visual_2025,
  title     = {Visual Safety Mapping for {UAV} Landings Using Ordinal Regression Networks},
  author    = {Abdollahzadeh, S. and Allili, M. S. and Boulmerka, A. and Lapointe, J.-F.},
  url       = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-105023324811&doi=10.1109%2FTAI.2025.3635093&partnerID=40&md5=14d5d4e4558cf5f4db08bd7d2a61a945},
  doi       = {10.1109/TAI.2025.3635093},
  issn      = {2691-4581},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {IEEE Transactions on Artificial Intelligence},
  abstract  = {As Unmanned Aerial Vehicles (UAVs) see growing use in civilian applications, reliably identifying Safe Landing Zones (SLZs) in varied environments is essential for autonomous navigation and emergency response. Passive vision sensors offer a low-cost, lightweight solution for real-time terrain analysis and 3D scene reconstruction, making them ideal for onboard systems. We introduce OR-SLZNet, an original deep learning model based on ordinal regression to predict SLZs from UAV imagery. Unlike prior approaches, OR-SLZNet produces dense, multi-level safety maps by jointly leveraging photometric (e.g., color and texture) and geometric cues (e.g., flatness, slope, and depth), assigning each pixel an ordinal safety score that reflects landing suitability. With real-time inference ({\textasciitilde}0.02s/frame), the model supports onboard deployment and rapid decision-making in time-critical situations. Extensive experiments on five diverse datasets demonstrate OR-SLZNet effectiveness and strong generalization across a wide range of structural complexities. © 2020 IEEE.},
  keywords  = {automatic UAV navigation, deep ordinal regression, safe landing zones (SLZ), Semantic segmentation},
  pubstate  = {published},
  tppubtype = {article}
}



