2024
Liu, David; Li, Zhengkun; Wu, Zihao; Li, Changying
Digital Twin/MARS-CycleGAN: Enhancing Sim-to-Real Crop/Row Detection for MARS Phenotyping Robot Using Synthetic Images Journal Article
In: Journal of Field Robotics, 2024.
Tags: digital twin, field-based robotic phenotyping, object detection, sim-to-real transfer, zero-shot
@article{Liu,
title = {Digital Twin/MARS-CycleGAN: Enhancing Sim-to-Real Crop/Row Detection for MARS Phenotyping Robot Using Synthetic Images},
author = {David Liu and Zhengkun Li and Zihao Wu and Changying Li},
url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.22473},
doi = {10.1002/rob.22473},
year = {2024},
date = {2024-12-10},
journal = {Journal of Field Robotics},
abstract = {Robotic crop phenotyping has emerged as a key technology for assessing crops' phenotypic traits at scale, which is essential for developing new crop varieties with the aim of increasing productivity and adapting to the changing climate. However, developing and deploying crop phenotyping robots faces many challenges, such as complex and variable crop shapes that complicate robotic object detection, dynamic and unstructured environments that confound robotic control, and real-time computing and big data management that challenge robotic hardware/software. This work addresses the first challenge by proposing a novel Digital Twin (DT)/MARS-CycleGAN model for image augmentation to improve crop object detection against complex and variable backgrounds by our Modular Agricultural Robotic System (MARS). The core idea is that, in addition to the cycle-consistency losses in the CycleGAN model, we designed and enforced a new DT/MARS loss in the deep learning model to penalize the inconsistency between real crop images captured by MARS and synthesized images generated by DT/MARS-CycleGAN. The synthesized crop images therefore closely mimic real images in realism, and they are employed to fine-tune object detectors such as YOLOv8. Extensive experiments demonstrate that the new DT/MARS-CycleGAN framework significantly boosts crop/row detection performance for MARS, contributing to the field of robotic crop phenotyping. We release our code and data to the research community (https://github.com/UGA-BSAIL/DT-MARS-CycleGAN).},
keywords = {digital twin, field-based robotic phenotyping, object detection, sim-to-real transfer, zero-shot},
pubstate = {published},
tppubtype = {article}
}
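The abstract states the DT/MARS loss only at a high level: an extra term, alongside CycleGAN's cycle-consistency losses, penalizing inconsistency between real MARS-captured images and synthesized images. Below is a minimal PyTorch sketch of one plausible reading of that objective (one cycle direction shown for brevity); the generators, discriminator, feature extractor phi, and loss weights are illustrative placeholders, not the authors' implementation, which is available in the linked repository.

import torch
import torch.nn.functional as F

def dt_mars_cyclegan_losses(G_sim2real, G_real2sim, D_real, sim_batch,
                            real_batch, phi, lambda_cyc=10.0, lambda_dt=5.0):
    """Standard CycleGAN terms plus a hypothetical DT/MARS term that pulls
    synthesized images toward real field images (sim->real direction only)."""
    fake_real = G_sim2real(sim_batch)   # simulated image -> realistic image
    rec_sim = G_real2sim(fake_real)     # cycle back to the simulated domain

    # Least-squares adversarial loss for the sim->real generator.
    pred = D_real(fake_real)
    adv = F.mse_loss(pred, torch.ones_like(pred))

    # Cycle consistency: the reconstruction should match the original input.
    cyc = F.l1_loss(rec_sim, sim_batch)

    # Hypothetical DT/MARS loss: match batch feature statistics of the
    # synthesized images to those of real MARS-captured images, since the
    # two batches are unpaired and cannot be compared pixel-by-pixel.
    dt_mars = F.l1_loss(phi(fake_real).mean(dim=0),
                        phi(real_batch).mean(dim=0))

    return adv + lambda_cyc * cyc + lambda_dt * dt_mars

Matching feature statistics rather than raw pixels is one way to penalize real/synthetic inconsistency without paired images; the paper's exact formulation may differ.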
2023
Lu, Guoyu; Li, Sheng; Mai, Gengchen; Sun, Jin; Zhu, Dajiang; Chai, Lilong; Sun, Haijian; Wang, Xianqiao; Dai, Haixing; Liu, Ninghao; Xu, Rui; Petti, Daniel; Li, Changying; Liu, Tianming
AGI for Agriculture Journal Article
In: arXiv preprint arXiv:2304.06136, 2023.
Tags: 3D reconstruction, AGI, Deep convolutional neural network, deep learning, High-throughput phenotyping, object detection, phenotyping robot, robotics
@article{lu2023agi,
title = {AGI for Agriculture},
author = {Guoyu Lu and Sheng Li and Gengchen Mai and Jin Sun and Dajiang Zhu and Lilong Chai and Haijian Sun and Xianqiao Wang and Haixing Dai and Ninghao Liu and Rui Xu and Daniel Petti and Changying Li and Tianming Liu},
url = {https://arxiv.org/abs/2304.06136},
year = {2023},
date = {2023-04-12},
urldate = {2023-01-01},
abstract = {Artificial General Intelligence (AGI) is poised to revolutionize a variety of sectors, including healthcare, finance, transportation, and education. Within healthcare, AGI is being utilized to analyze clinical medical notes, recognize patterns in patient data, and aid in patient management. Agriculture is another critical sector that impacts the lives of individuals worldwide. It serves as a foundation for providing food, fiber, and fuel, yet faces several challenges, such as climate change, soil degradation, water scarcity, and food security. AGI has the potential to tackle these issues by enhancing crop yields, reducing waste, and promoting sustainable farming practices. It can also help farmers make informed decisions by leveraging real-time data, leading to more efficient and effective farm management. This paper delves into the potential future applications of AGI in agriculture, such as agriculture image processing, natural language processing (NLP), robotics, knowledge graphs, and infrastructure, and their impact on precision livestock and precision crops. By leveraging the power of AGI, these emerging technologies can provide farmers with actionable insights, allowing for optimized decision-making and increased productivity. The transformative potential of AGI in agriculture is vast, and this paper aims to highlight its potential to revolutionize the industry. },
keywords = {3D reconstruction, AGI, Deep convolutional neural network, deep learning, High-throughput phenotyping, object detection, phenotyping robot, robotics},
pubstate = {published},
tppubtype = {article}
}
Tan, Chenjiao; Li, Changying; He, Dongjian; Song, Huaibo
Anchor-free deep convolutional neural network for tracking and counting cotton seedlings and flowers Journal Article
In: Computers and Electronics in Agriculture, vol. 215, pp. 108359, 2023, ISSN: 0168-1699.
Tags: Anchor free, CNN, Counting, Deep convolutional network, High-throughput phenotyping, object detection, Plant and plant organ, Tracking
@article{Tan2023a,
title = {Anchor-free deep convolutional neural network for tracking and counting cotton seedlings and flowers},
author = {Chenjiao Tan and Changying Li and Dongjian He and Huaibo Song},
url = {https://www.sciencedirect.com/science/article/pii/S0168169923007470},
doi = {10.1016/j.compag.2023.108359},
issn = {0168-1699},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {Computers and Electronics in Agriculture},
volume = {215},
pages = {108359},
abstract = {Accurate counting of plants and their organs in natural environments is essential for breeders and growers. For breeders, counting plants during the seedling stage aids in selecting genotypes with superior emergence rates, while for growers, it informs decisions about potential replanting. Meanwhile, counting specific plant organs, such as flowers, forecasts yields for different genotypes, offering insights into production levels. The overall goal of this study was to investigate a deep convolutional neural network-based tracking method, CenterTrack, for cotton seedling and flower counting from video frames. The network extends a customized CenterNet, an anchor-free object detector. CenterTrack predicts detections in the current frame and their displacements from the previous frame, which are used to associate the same object across consecutive frames. The modified CenterNet detector achieved high accuracy on both seedling and flower datasets with an overall AP50 of 0.962. The video tracking hyperparameters were optimized for each dataset using orthogonal tests. Experimental results showed that seedling and flower counts with optimized hyperparameters highly correlated with manual counts (R² = 0.98 and R² = 0.95), and the mean relative errors of 75 cotton seedling testing videos and 50 flower testing videos were 5.5% and 10.8%, respectively. An average counting speed of 20.4 frames per second was achieved with an input resolution of 1920 × 1080 pixels for both seedling and flower videos. The anchor-free deep convolutional neural network-based tracking method provides automatic tracking and counting in video frames, which will significantly benefit plant breeding and crop management.},
keywords = {Anchor free, CNN, Counting, Deep convolutional network, High-throughput phenotyping, object detection, Plant and plant organ, Tracking},
pubstate = {published},
tppubtype = {article}
}
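The tracking-by-displacement idea in the abstract (the network predicts each current detection plus its offset back to the previous frame, and matching happens in image space) reduces to a greedy nearest-neighbor association. A minimal NumPy sketch follows, assuming center-point tracks and a hypothetical distance gate max_dist, neither of which is taken from the paper.

import numpy as np

def associate(prev_centers, curr_centers, displacements, max_dist=50.0):
    """Greedily match current detections to previous-frame tracks.

    prev_centers:  (M, 2) array of track centers in the previous frame.
    curr_centers:  (N, 2) array of detection centers in the current frame.
    displacements: (N, 2) predicted per-detection motion (prev -> curr).
    Returns (matches, unmatched): matches is a list of (curr_idx, prev_idx)
    pairs; unmatched detections would start new tracks.
    """
    if len(prev_centers) == 0 or len(curr_centers) == 0:
        return [], list(range(len(curr_centers)))
    # Project each current detection back to its predicted previous position.
    back_projected = curr_centers - displacements
    dists = np.linalg.norm(
        back_projected[:, None, :] - prev_centers[None, :, :], axis=2)
    matches, used_curr, used_prev = [], set(), set()
    # Visit candidate pairs from closest to farthest.
    for curr_idx, prev_idx in zip(*np.unravel_index(
            np.argsort(dists, axis=None), dists.shape)):
        if dists[curr_idx, prev_idx] > max_dist:
            break  # all remaining pairs are even farther apart
        if curr_idx in used_curr or prev_idx in used_prev:
            continue
        matches.append((int(curr_idx), int(prev_idx)))
        used_curr.add(curr_idx)
        used_prev.add(prev_idx)
    unmatched = [i for i in range(len(curr_centers)) if i not in used_curr]
    return matches, unmatched

Unmatched detections receive new track IDs, which is what drives the running seedling or flower count.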
2022
Tan, Chenjiao; Li, Changying; He, Dongjian; Song, Huaibo
Towards real-time tracking and counting of seedlings with a one-stage detector and optical flow Journal Article
In: Computers and Electronics in Agriculture, vol. 193, pp. 106683, 2022, ISSN: 0168-1699.
Tags: Cotton seedling, Counting, Deep convolutional neural network, deep learning, machine learning, object detection, Optical flow
@article{TAN2022106683,
title = {Towards real-time tracking and counting of seedlings with a one-stage detector and optical flow},
author = {Chenjiao Tan and Changying Li and Dongjian He and Huaibo Song},
url = {https://www.sciencedirect.com/science/article/pii/S0168169921007006},
doi = {10.1016/j.compag.2021.106683},
issn = {0168-1699},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Computers and Electronics in Agriculture},
volume = {193},
pages = {106683},
abstract = {The population of crop seedlings is important for breeders and growers to evaluate the emergence rate of different cultivars and the necessity of replanting, but manual counting of plant seedlings is time-consuming and tedious. Building upon our prior work, we advanced the cotton seedling tracking method by incorporating a one-stage object detection deep neural network and optical flow to improve tracking speed and counting accuracy. Videos of cotton seedlings were captured from the top view using consumer-grade video cameras. You Only Look Once Version 4 (YOLOv4), a one-stage object detection network, was trained to detect cotton seedlings in each frame and to generate bounding boxes. To associate the same seedlings between adjacent frames, an optical flow-based tracking method was adopted to estimate camera motion. By comparing the positions of bounding boxes predicted by optical flow with those detected by the YOLOv4 network in the same frame, the number of cotton seedlings was updated. The trained YOLOv4 model achieved high accuracy under occlusion, blurry images, complex backgrounds, and extreme illumination. The F1 score of the final detection model was 0.98 and the average precision was 99.12%. Important tracking metrics were compared to evaluate the tracking performance. The Multiple Object Tracking Accuracy (MOTA) and ID switch rate of the proposed tracking method were 72.8% and 0.1%, respectively. Counting results showed that the relative error across all testing videos was 3.13%. Compared with Kalman filter and particle filter-based methods, our optical flow-based method generated fewer errors on testing videos because of its more accurate motion estimation. Compared with our previous work, the RMSE of the optical flow-based method decreased by 0.54 and the counting speed increased from 2.5 to 10.8 frames per second. The counting speed can reach 16.6 frames per second if the input resolution is reduced to 1280 × 720 pixels, with only a 0.45% reduction in counting accuracy. The proposed method provides an automatic and near real-time approach for tracking and counting multiple cotton seedlings in video frames with improved speed and accuracy, which will benefit plant breeding and precision crop management.},
keywords = {Cotton seedling, Counting, Deep convolutional neural network, deep learning, machine learning, object detection, Optical flow},
pubstate = {published},
tppubtype = {article}
}
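The counting loop this abstract describes (YOLOv4 boxes per frame, optical-flow estimation of camera motion, and a comparison of flow-predicted boxes with detected boxes) can be sketched with OpenCV. The sparse Lucas-Kanade flow, the median-translation camera model, and the IoU gate below are assumptions for illustration; the paper's actual motion model and matching rule may differ.

import cv2
import numpy as np

def estimate_camera_motion(prev_gray, curr_gray):
    """Return the median (dx, dy) translation between two grayscale frames."""
    pts = cv2.goodFeaturesToTrack(prev_gray, maxCorners=200,
                                  qualityLevel=0.01, minDistance=10)
    if pts is None:
        return np.zeros(2)
    nxt, status, _ = cv2.calcOpticalFlowPyrLK(prev_gray, curr_gray, pts, None)
    good = status.ravel() == 1
    if not good.any():
        return np.zeros(2)
    return np.median((nxt[good] - pts[good]).reshape(-1, 2), axis=0)

def iou(a, b):
    """IoU of two boxes in (x1, y1, x2, y2) format."""
    x1, y1 = max(a[0], b[0]), max(a[1], b[1])
    x2, y2 = min(a[2], b[2]), min(a[3], b[3])
    inter = max(0, x2 - x1) * max(0, y2 - y1)
    union = ((a[2] - a[0]) * (a[3] - a[1])
             + (b[2] - b[0]) * (b[3] - b[1]) - inter)
    return inter / union if union > 0 else 0.0

def update_count(prev_boxes, curr_boxes, motion, count, iou_thresh=0.3):
    """Shift the previous frame's boxes by the estimated camera motion,
    match them to current detections by IoU, and count any detection
    without a match as a newly seen seedling."""
    dx, dy = motion
    predicted = [(x1 + dx, y1 + dy, x2 + dx, y2 + dy)
                 for x1, y1, x2, y2 in prev_boxes]
    for box in curr_boxes:
        if not any(iou(box, p) >= iou_thresh for p in predicted):
            count += 1  # seedling entering the view for the first time
    return count

Using a single global translation is the simplest camera-motion model for a top-view rig moving over a row; per-box flow or a homography would be natural refinements.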