2026
Tan, Chenjiao; Li, Changying; Sun, Jin
Dense cotton boll counting with transformer-based video tracking and a customized phenotyping robot for data collection Journal Article
In: Computers and Electronics in Agriculture, vol. 240, pp. 111214, 2026, ISSN: 0168-1699.
Abstract | Links | BibTeX | Tags: detection, Multi-Object Tracking, Optical flow, Point tracking, RT-DETR
@article{TAN2026111214,
  title     = {Dense cotton boll counting with transformer-based video tracking and a customized phenotyping robot for data collection},
  author    = {Tan, Chenjiao and Li, Changying and Sun, Jin},
  url       = {https://www.sciencedirect.com/science/article/pii/S0168169925013201},
  doi       = {10.1016/j.compag.2025.111214},
  issn      = {0168-1699},
  year      = {2026},
  date      = {2026-01-01},
  journal   = {Computers and Electronics in Agriculture},
  volume    = {240},
  pages     = {111214},
  abstract  = {Accurately estimating the number of cotton bolls is vital for plant phenotyping, offering essential insights for both breeders and growers. This trait offers valuable phenotypic information on plant productivity and supports crop management decisions to optimize yield and profitability for growers. Manual counting of bolls in the field, however, is impractical because it is labor-intensive and time-consuming. This study presented a video-based cotton boll counting approach that integrated a transformer-based detector (RT-DETR) with multi-object tracking techniques. To prevent double-counting bolls across frames, two motion estimation methods, FlowFormer and TAPIR were explored to predict the movement of bolls between adjacent frames and a two-stage association process combining Intersection over Union (IoU) and Euclidean distances was developed to track bolls across time. To further enhance counting accuracy, a virtual counting line was introduced to reduce ID switch errors. Experimental results demonstrated the effectiveness of the RT-DETR model, achieving an mAP0.5 exceeding 0.93 for dense boll detection. Furthermore, both FlowFormer and TAPIR can be used for tracking cotton bolls in the videos while the tracking performance of the FlowFormer-based method was slightly higher than that of the TAPIR-based method with an MOTA of 73.36 \% and an IDF1 of 79.89 \%. The tracking approach integrating RT-DETR and FlowFormer exhibited a relatively strong correlation between the predicted and the ground-truth boll number with an R2 of 0.60 and an MAPE of 14.34 \% on multi-plant plots. In single-plant plots, the approach achieved a high correlation with an R2 of 0.97 and a MAPE of 10.33\%. These findings indicated the potential of the proposed approach as an effective, automated tool to support breeding programs and yield assessments in cotton production. Both the code and dataset can be accessed at: https://github.com/UGA-BSAIL/Dense\_cotton\_boll\_counting.},
  keywords  = {detection, Multi-Object Tracking, Optical flow, Point tracking, RT-DETR},
  pubstate  = {published},
  tppubtype = {article}
}
2025
Petti, Daniel; Li, Changying; Chee, Peng
Real-Time Multi-View Flower Counting With a Ground Mobile Robot Journal Article
In: Journal of Field Robotics, vol. 42, no. 8, pp. 1–27, 2025.
Abstract | Links | BibTeX | Tags: Computer Vision, cotton, Edge Computing, High-throughput phenotyping, mobile robot, Multi-Object Tracking, multi-view
@article{Petti2025a,
  title     = {Real-Time Multi-View Flower Counting With a Ground Mobile Robot},
  author    = {Petti, Daniel and Li, Changying and Chee, Peng},
  url       = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.70093},
  doi       = {10.1002/rob.70093},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Journal of Field Robotics},
  volume    = {42},
  number    = {8},
  pages     = {1--27},
  abstract  = {Although season-long cotton flowering time characterization has value to breeders and growers, a manual data collection process is too laborious to be practical in most cases. In recent years, several fully automated flower counting approaches have been proposed. However, such approaches are typically designed to run offline and require a significant amount of computation. Furthermore, little thought has gone into developing convenient interfaces and integrations so that a layperson can use such systems without extensive training. The goal of this study is to develop a flower tracking system that is deployable on a ground robot and can operate in real time. A previous GCNNMatch++ approach was modified to increase the inference speed. Additionally, data from multiple cameras were fused to avoid canopy occlusions, and three-dimensional flower locations were extracted by integrating GPS data from the robot. It is shown that the approach significantly outperforms UAV-based counting and single-camera counting while running at above 40 FPS on an edge device, achieving a counting error of 15. Overall, it is believed that the highly integrated, automated, and simplified flower counting solution makes significant strides toward a practical commercial cotton phenotyping platform.},
  keywords  = {Computer Vision, cotton, Edge Computing, High-throughput phenotyping, mobile robot, Multi-Object Tracking, multi-view},
  pubstate  = {published},
  tppubtype = {article}
}
2024
Petti, Daniel; Zhu, Ronghang; Li, Sheng; Li, Changying
Graph Neural Networks for lightweight plant organ tracking Journal Article
In: Computers and Electronics in Agriculture, vol. 225, pp. 109294, 2024, ISSN: 0168-1699.
Abstract | Links | BibTeX | Tags: Convolutional Neural Network, Graph Neural Network, High-throughput phenotyping, Machine vision, Multi-Object Tracking
@article{PETTI2024109294,
  title     = {Graph Neural Networks for lightweight plant organ tracking},
  author    = {Petti, Daniel and Zhu, Ronghang and Li, Sheng and Li, Changying},
  url       = {https://www.sciencedirect.com/science/article/pii/S0168169924006859},
  doi       = {10.1016/j.compag.2024.109294},
  issn      = {0168-1699},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {Computers and Electronics in Agriculture},
  volume    = {225},
  pages     = {109294},
  abstract  = {Many specific problems within the domain of high throughput phenotyping require the accurate localization of plant organs. To track and count plant organs, we propose GCNNMatch++, a Graph Convolutional Neural Network (GCNN) that is capable of online tracking objects from videos. Based upon the GCNNMatch tracker with an improved CensNet GNN, our end-to-end tracking approach achieves fast inference. In order to adapt this approach to flower counting, we collected a large, high-quality dataset of cotton flower videos by leveraging our custom-built MARS-X robotic platform. Specifically, our system can count cotton flowers in the field with 80\% accuracy, achieving a Higher-Order Tracking Accuracy (HOTA) of 51.09 and outperforming more generic tracking methods. Without any optimization (such as employing TensorRT), our association model runs in 44 ms on a central processing unit (CPU). On appropriate hardware, our model holds promise for achieving real-time counting performance when coupled with a fast detector. Overall, our approach is useful in counting cotton flowers and other relevant plant organs for both breeding programs and yield estimation.},
  keywords  = {Convolutional Neural Network, Graph Neural Network, High-throughput phenotyping, Machine vision, Multi-Object Tracking},
  pubstate  = {published},
  tppubtype = {article}
}