@inproceedings{ff68024c6cdd4188bed5b8e4a9990b98,
title = "LiDAR-Based Scene Understanding for Navigation in Unstructured Environments",
abstract = "Reliable scene understanding is crucial for autonomous off-road navigation. This work proposes a perception framework based on multiple LiDARs and odometry that analyzes a robot{\textquoteright}s environment to generate an occupancy grid map for the navigation task. A gradient-based approach separates obstacle and ground points. The exact position of negative obstacles (cliffs and holes) is corrected using geometric relations. Then, obstacle points are used to create an occupancy grid map for the robot. Observed obstacles are propagated to the next frame to cover blind spots in the sensor setup, and temporary misclassifications and dynamic obstacles are handled using ground points. The proposed framework is tested on a robot with two LiDARs to evaluate its performance. The results show successful navigation in the presence of positive and negative obstacles.",
keywords = "Mobile Robots, Scene Understanding, Off-Road Navigation",
author = "{Didari Khamseh Motlagh}, Hamid and Gerald Steinbauer-Wagner",
year = "2023",
doi = "10.1007/978-3-031-32606-6_21",
language = "English",
isbn = "978-3-031-32605-9",
volume = "135",
series = "Mechanisms and Machine Science",
publisher = "Springer, Cham",
pages = "178--185",
editor = "Tadej Petri{\v c} and Ale{\v s} Ude and Leon {\v Z}lajpah",
booktitle = "Advances in Service and Industrial Robotics - RAAD 2023",
}