2023
Lu, Guoyu; Li, Sheng; Mai, Gengchen; Sun, Jin; Zhu, Dajiang; Chai, Lilong; Sun, Haijian; Wang, Xianqiao; Dai, Haixing; Liu, Ninghao; Xu, Rui; Petti, Daniel; Li, Changying; Liu, Tianming
AGI for Agriculture Journal Article
In: 2023.
Abstract | Links | BibTeX | Tags: 3D reconstruction, AGI, Deep convolutional neural network, deep learning, High-throughput phenotyping, object detection, phenotyping robot, robotics
@article{lu2023agi,
  title         = {{AGI} for Agriculture},
  author        = {Lu, Guoyu and Li, Sheng and Mai, Gengchen and Sun, Jin and Zhu, Dajiang and Chai, Lilong and Sun, Haijian and Wang, Xianqiao and Dai, Haixing and Liu, Ninghao and Xu, Rui and Petti, Daniel and Li, Changying and Liu, Tianming},
  url           = {https://arxiv.org/abs/2304.06136},
  eprint        = {2304.06136},
  archiveprefix = {arXiv},
  year          = {2023},
  date          = {2023-04-12},
  urldate       = {2023-01-01},
  note          = {arXiv preprint},
  abstract      = {Artificial General Intelligence (AGI) is poised to revolutionize a variety of sectors, including healthcare, finance, transportation, and education. Within healthcare, AGI is being utilized to analyze clinical medical notes, recognize patterns in patient data, and aid in patient management. Agriculture is another critical sector that impacts the lives of individuals worldwide. It serves as a foundation for providing food, fiber, and fuel, yet faces several challenges, such as climate change, soil degradation, water scarcity, and food security. AGI has the potential to tackle these issues by enhancing crop yields, reducing waste, and promoting sustainable farming practices. It can also help farmers make informed decisions by leveraging real-time data, leading to more efficient and effective farm management. This paper delves into the potential future applications of AGI in agriculture, such as agriculture image processing, natural language processing (NLP), robotics, knowledge graphs, and infrastructure, and their impact on precision livestock and precision crops. By leveraging the power of AGI, these emerging technologies can provide farmers with actionable insights, allowing for optimized decision-making and increased productivity. The transformative potential of AGI in agriculture is vast, and this paper aims to highlight its potential to revolutionize the industry. },
  keywords      = {3D reconstruction, AGI, Deep convolutional neural network, deep learning, High-throughput phenotyping, object detection, phenotyping robot, robotics},
  pubstate      = {published},
  tppubtype     = {article}
}
2022
Tan, Chenjiao; Li, Changying; He, Dongjian; Song, Huaibo
Towards real-time tracking and counting of seedlings with a one-stage detector and optical flow Journal Article
In: Computers and Electronics in Agriculture, vol. 193, pp. 106683, 2022, ISSN: 0168-1699.
Abstract | Links | BibTeX | Tags: Cotton seedling, Counting, Deep convolutional neural network, deep learning, machine learning, object detection, Optical flow
@article{TAN2022106683,
  title    = {Towards real-time tracking and counting of seedlings with a one-stage detector and optical flow},
  author   = {Tan, Chenjiao and Li, Changying and He, Dongjian and Song, Huaibo},
  url      = {https://www.sciencedirect.com/science/article/pii/S0168169921007006},
  doi      = {10.1016/j.compag.2021.106683},
  issn     = {0168-1699},
  year     = {2022},
  date     = {2022-01-01},
  urldate  = {2022-01-01},
  journal  = {Computers and Electronics in Agriculture},
  volume   = {193},
  pages    = {106683},
  abstract = {The population of crop seedlings is important for breeders and growers to evaluate the emergence rate of different cultivars and the necessity of replanting, but manual counting of plant seedlings is time-consuming and tedious. Building upon our prior work, we advanced the cotton seedling tracking method by incorporating a one-stage object detection deep neural network and optical flow to improve tracking speed and counting accuracy. Videos of cotton seedlings were captured using consumer-grade video cameras from the top view. You Only Look Once Version 4 (YOLOv4), a one-stage object detection network, was trained to detect cotton seedlings in each frame and to generate bounding boxes. To associate the same seedlings between adjacent frames, an optical flow-based tracking method was adopted to estimate camera motions. By comparing the positions of bounding boxes predicted by optical flow and detected by the YOLOv4 network in the same frame, the number of cotton seedlings was updated. The trained YOLOv4 model achieved high accuracy under conditions of occlusions, blurry images, complex backgrounds, and extreme illuminations. The F1 score of the final detection model was 0.98 and the average precision was 99.12%. Important tracking metrics were compared to evaluate the tracking performance. The Multiple-Object Tracking Accuracy (MOTA) and ID switch of the proposed tracking method were 72.8% and 0.1%, respectively. Counting results showed that the relative error of all testing videos was 3.13%. Compared with the Kalman filter and particle filter-based methods, our optical flow-based method generated fewer errors on testing videos because of higher accuracy of motion estimation. Compared with our previous work, the RMSE of the optical flow-based method decreased by 0.54 and the counting speed increased from 2.5 to 10.8 frames per second. 
The counting speed can reach 16.6 frames per second if the input resolution was reduced to 1280 × 720 pixels with an only 0.45% reduction in counting accuracy. The proposed method provides an automatic and near real-time tracking approach for counting of multiple cotton seedlings in video frames with improved speed and accuracy, which will benefit plant breeding and precision crop management.},
  keywords = {Cotton seedling, Counting, Deep convolutional neural network, deep learning, machine learning, object detection, Optical flow},
  pubstate = {published},
  tppubtype = {article}
}