
@article{ref1,
  author   = {Tamagusko, Tiago and Correia, Matheus Gomes and Huynh, Minh Anh and Ferreira, Adelino},
  title    = {Deep Learning Applied to Road Accident Detection with Transfer Learning and Synthetic Images},
  journal  = {Transportation Research Procedia},
  year     = {2022},
  volume   = {64},
  pages    = {90--97},
  issn     = {2352-1465},
  doi      = {10.1016/j.trpro.2022.09.012},
  url      = {https://doi.org/10.1016/j.trpro.2022.09.012},
  language = {en},
  abstract = {Artificial Intelligence (AI) has driven solutions in diverse areas; one of the most prominent fields is Computer Vision (CV). Accordingly, solutions to effectively detect road accidents can be a game-changer for road safety. The focus is on Vision Zero, i.e., eliminate all traffic fatalities and severe injuries. However, when an accident occurs, response time is essential. Several countries have surveillance cameras monitoring the road network. However, real-time monitoring requires a considerable number of people with training and attention. Our solution automates this with AI, using deep learning to train a model to detect potential accidents. The CV method based on binary image classification was used, being images without accidents classified as normal and images with accidents as alarm. The biggest challenge in developing the model was obtaining images of accidents from the analyzed cameras. The solution adopted was to create synthetic images of these rare events. Several architectures of Convolutional Neural Networks were tested, and it was found that the best approach was to use transfer learning. As base models for transfer learning, the best results were EfficientNetB1 and MobileNetV2. The former for its quality in prediction and the latter for its size and execution speed. As a case study, open data images from Finnish road surveillance cameras were used, provided every ten minutes. Finally, it was found that the solution trained with EfficientNetB1 as the base model has a Mean Average Precision (mAP) of 0.89 and a Matthews Correlation Coefficient (MCC) of 0.77. The solution based on MobileNetV2 has an mAP of 0.88 and an MCC of 0.71.},
}