2025
Petti, Daniel; Li, Changying; Chee, Peng
Real-Time Multi-View Flower Counting With a Ground Mobile Robot Journal Article
In: Journal of Field Robotics, vol. 42, no. 8, pp. 1-27, 2025.
Abstract | Links | BibTeX | Tags: Computer Vision, cotton, Edge Computing, High-throughput phenotyping, mobile robot, Multi-Object Tracking, multi-view
@article{Petti2025a,
  title     = {Real-Time Multi-View Flower Counting With a Ground Mobile Robot},
  author    = {Petti, Daniel and Li, Changying and Chee, Peng},
  url       = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.70093},
  doi       = {10.1002/rob.70093},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Journal of Field Robotics},
  volume    = {42},
  number    = {8},
  pages     = {1--27},
  abstract  = {Although season-long cotton flowering time characterization has value to breeders and growers, a manual data collection process is too laborious to be practical in most cases. In recent years, several fully automated flower counting approaches have been proposed. However, such approaches are typically designed to run offline and require a significant amount of computation. Furthermore, little thought has gone into developing convenient interfaces and integrations so that a layperson can use such systems without extensive training. The goal of this study is to develop a flower tracking system that is deployable on a ground robot and can operate in real time. A previous GCNNMatch++ approach was modified to increase the inference speed. Additionally, data from multiple cameras were fused to avoid canopy occlusions, and three-dimensional flower locations were extracted by integrating GPS data from the robot. It is shown that the approach significantly outperforms UAV-based counting and single-camera counting while running at above 40 FPS on an edge device, achieving a counting error of 15. Overall, it is believed that the highly integrated, automated, and simplified flower counting solution makes significant strides toward a practical commercial cotton phenotyping platform.},
  keywords  = {Computer Vision, cotton, Edge Computing, High-throughput phenotyping, mobile robot, Multi-Object Tracking, multi-view},
  pubstate  = {published},
  tppubtype = {article}
}
Although season-long cotton flowering time characterization has value to breeders and growers, a manual data collection process is too laborious to be practical in most cases. In recent years, several fully automated flower counting approaches have been proposed. However, such approaches are typically designed to run offline and require a significant amount of computation. Furthermore, little thought has gone into developing convenient interfaces and integrations so that a layperson can use such systems without extensive training. The goal of this study is to develop a flower tracking system that is deployable on a ground robot and can operate in real time. A previous GCNNMatch++ approach was modified to increase the inference speed. Additionally, data from multiple cameras were fused to avoid canopy occlusions, and three-dimensional flower locations were extracted by integrating GPS data from the robot. It is shown that the approach significantly outperforms UAV-based counting and single-camera counting while running at above 40 FPS on an edge device, achieving a counting error of 15. Overall, it is believed that the highly integrated, automated, and simplified flower counting solution makes significant strides toward a practical commercial cotton phenotyping platform.