2026
Tan, Chenjiao; Li, Changying; Sun, Jin
Dense cotton boll counting with transformer-based video tracking and a customized phenotyping robot for data collection (Journal Article)
In: Computers and Electronics in Agriculture, vol. 240, art. no. 111214, 2026, ISSN: 0168-1699.
Abstract | Links | BibTeX | Tags: detection, Multi-Object Tracking, Optical flow, Point tracking, RT-DETR
@article{TAN2026111214,
  title     = {Dense cotton boll counting with transformer-based video tracking and a customized phenotyping robot for data collection},
  author    = {Tan, Chenjiao and Li, Changying and Sun, Jin},
  url       = {https://www.sciencedirect.com/science/article/pii/S0168169925013201},
  doi       = {10.1016/j.compag.2025.111214},
  issn      = {0168-1699},
  year      = {2026},
  date      = {2026-01-01},
  journal   = {Computers and Electronics in Agriculture},
  volume    = {240},
  pages     = {111214},
  abstract  = {Accurately estimating the number of cotton bolls is vital for plant phenotyping, offering essential insights for both breeders and growers. This trait offers valuable phenotypic information on plant productivity and supports crop management decisions to optimize yield and profitability for growers. Manual counting of bolls in the field, however, is impractical because it is labor-intensive and time-consuming. This study presented a video-based cotton boll counting approach that integrated a transformer-based detector (RT-DETR) with multi-object tracking techniques. To prevent double-counting bolls across frames, two motion estimation methods, FlowFormer and TAPIR were explored to predict the movement of bolls between adjacent frames and a two-stage association process combining Intersection over Union (IoU) and Euclidean distances was developed to track bolls across time. To further enhance counting accuracy, a virtual counting line was introduced to reduce ID switch errors. Experimental results demonstrated the effectiveness of the RT-DETR model, achieving an mAP0.5 exceeding 0.93 for dense boll detection. Furthermore, both FlowFormer and TAPIR can be used for tracking cotton bolls in the videos while the tracking performance of the FlowFormer-based method was slightly higher than that of the TAPIR-based method with an MOTA of 73.36 \% and an IDF1 of 79.89 \%. The tracking approach integrating RT-DETR and FlowFormer exhibited a relatively strong correlation between the predicted and the ground-truth boll number with an R2 of 0.60 and an MAPE of 14.34 \% on multi-plant plots. In single-plant plots, the approach achieved a high correlation with an R2 of 0.97 and a MAPE of 10.33\%. These findings indicated the potential of the proposed approach as an effective, automated tool to support breeding programs and yield assessments in cotton production. Both the code and dataset can be accessed at: https://github.com/UGA-BSAIL/Dense\_cotton\_boll\_counting.},
  keywords  = {detection, Multi-Object Tracking, Optical flow, Point tracking, RT-DETR},
  pubstate  = {published},
  tppubtype = {article}
}
2025
Li, Zhengkun; Xu, Rui; Brown, Nino; Tillman, Barry L.; Li, Changying
Plot-scale peanut yield estimation using a phenotyping robot and transformer-based image analysis (Journal Article)
In: Smart Agricultural Technology, vol. 12, art. no. 101154, 2025, ISSN: 2772-3755.
Abstract | Links | BibTeX | Tags: High-throughput phenotyping, Image stitching, Peanut, Pod detection, RT-DETR, Yield estimation
@article{LI2025101154,
  title     = {Plot-scale peanut yield estimation using a phenotyping robot and transformer-based image analysis},
  author    = {Li, Zhengkun and Xu, Rui and Brown, Nino and Tillman, Barry L. and Li, Changying},
  url       = {https://www.sciencedirect.com/science/article/pii/S2772375525003867},
  doi       = {10.1016/j.atech.2025.101154},
  issn      = {2772-3755},
  year      = {2025},
  date      = {2025-01-01},
  journal   = {Smart Agricultural Technology},
  volume    = {12},
  pages     = {101154},
  abstract  = {Peanuts rank as the seventh-largest crop in the United States with a farm gate value exceeding \$1 billion. Conventional peanut yield estimation methods involve digging, harvesting, transporting, and weighing, which are labor-intensive and inefficient for large-scale research operations. This inefficiency is particularly pronounced in peanut breeding, which requires precise pod yield estimations of each plot in order to compare genetic potential for yield to select new, high-performing breeding lines. To improve efficiency and throughput for accelerating genetic improvement, we proposed an automated robotic imaging system to predict peanut yields in the field after digging and inversion of plots. A workflow was developed to estimate yield accurately across different genotypes by counting the pods from stitched plot-scale images. After the robotic scanning in the field, the sequential images of each peanut plot were stitched together using the Local Feature Transformer (LoFTR)-based feature matching and estimated translation between adjusted images, which avoided replicated pod counting in overlapped image regions. Additionally, the Real-Time Detection Transformer (RT-DETR) was customized for pod detection by integrating partial convolution into a lightweight ResNet-18 backbone and refining the up-sampling and down-sampling modules in cross-scale feature fusion. The customized detector achieved a mean Average Precision (mAP50) of 89.3\% and a mAP95 of 55.0\%, improving by 3.3\% and 5.9\% over the original RT-DETR model with lighter weights and less computation. To determine the number of pods within the stitched plot-scale image, a sliding window-based method was used to divide it into smaller patches to improve the accuracy of pod detection. In a case study of a total of 68 plots across 19 genotypes in a peanut breeding yield trial, the result presented a correlation (R2=0.47) between the yield and predicted pod count, better than the structure-from-motion (SfM) method. The yield ranking among different genotypes using image prediction achieved an average consistency of 84.8\% with manual measurement. When the yield difference between two genotypes exceeded 12\%, the consistency surpassed 90\%. Overall, our robotic plot-scale peanut yield estimation workflow showed promise to replace the human measurement process, reducing the time and labor required for yield determination and improving the efficiency of peanut breeding.},
  keywords  = {High-throughput phenotyping, Image stitching, Peanut, Pod detection, RT-DETR, Yield estimation},
  pubstate  = {published},
  tppubtype = {article}
}