2024
Rodriguez-Sanchez, Javier; Snider, John L.; Johnsen, Kyle; Li, Changying
Cotton morphological traits tracking through spatiotemporal registration of terrestrial laser scanning time-series data Journal Article
In: Frontiers in Plant Science, vol. 15, 2024, ISSN: 1664-462X.
Abstract | Links | BibTeX | Tags: agricultural robot, LiDAR, phenotyping robot, robotics
@article{10.3389/fpls.2024.1436120,
  title     = {Cotton morphological traits tracking through spatiotemporal registration of terrestrial laser scanning time-series data},
  author    = {Rodriguez-Sanchez, Javier and Snider, John L. and Johnsen, Kyle and Li, Changying},
  url       = {https://www.frontiersin.org/journals/plant-science/articles/10.3389/fpls.2024.1436120},
  doi       = {10.3389/fpls.2024.1436120},
  issn      = {1664-462X},
  year      = {2024},
  date      = {2024-01-01},
  urldate   = {2024-01-01},
  journal   = {Frontiers in Plant Science},
  volume    = {15},
  abstract  = {Understanding the complex interactions between genotype-environment dynamics is fundamental for optimizing crop improvement. However, traditional phenotyping methods limit assessments to the end of the growing season, restricting continuous crop monitoring. To address this limitation, we developed a methodology for spatiotemporal registration of time-series 3D point cloud data, enabling field phenotyping over time for accurate crop growth tracking. Leveraging multi-scan terrestrial laser scanning (TLS), we captured high-resolution 3D LiDAR data in a cotton breeding field across various stages of the growing season to generate four-dimensional (4D) crop models, seamlessly integrating spatial and temporal dimensions. Our registration procedure involved an initial pairwise terrain-based matching for rough alignment, followed by a bird's-eye view adjustment for fine registration. Point clouds collected throughout nine sessions across the growing season were successfully registered both spatially and temporally, with average registration errors of approximately 3 cm. We used the generated 4D models to monitor canopy height (CH) and volume (CV) for eleven cotton genotypes over two months. The consistent height reference established via our spatiotemporal registration process enabled precise estimations of CH ($R^{2}$ = 0.95).},
  keywords  = {agricultural robot, LiDAR, phenotyping robot, robotics},
  pubstate  = {published},
  tppubtype = {article}
}
2023
Lu, Guoyu; Li, Sheng; Mai, Gengchen; Sun, Jin; Zhu, Dajiang; Chai, Lilong; Sun, Haijian; Wang, Xianqiao; Dai, Haixing; Liu, Ninghao; Xu, Rui; Petti, Daniel; Li, Changying; Liu, Tianming; Li, Changying
AGI for Agriculture Journal Article
In: 2023.
Abstract | Links | BibTeX | Tags: 3D reconstruction, AGI, Deep convolutional neural network, deep learning, High-throughput phenotyping, object detection, phenotyping robot, robotics
@article{lu2023agi,
  title      = {{AGI} for Agriculture},
  author     = {Lu, Guoyu and Li, Sheng and Mai, Gengchen and Sun, Jin and Zhu, Dajiang and Chai, Lilong and Sun, Haijian and Wang, Xianqiao and Dai, Haixing and Liu, Ninghao and Xu, Rui and Petti, Daniel and Liu, Tianming and Li, Changying},
  url        = {https://arxiv.org/abs/2304.06136},
  eprint     = {2304.06136},
  eprinttype = {arXiv},
  year       = {2023},
  date       = {2023-04-12},
  urldate    = {2023-01-01},
  abstract   = {Artificial General Intelligence (AGI) is poised to revolutionize a variety of sectors, including healthcare, finance, transportation, and education. Within healthcare, AGI is being utilized to analyze clinical medical notes, recognize patterns in patient data, and aid in patient management. Agriculture is another critical sector that impacts the lives of individuals worldwide. It serves as a foundation for providing food, fiber, and fuel, yet faces several challenges, such as climate change, soil degradation, water scarcity, and food security. AGI has the potential to tackle these issues by enhancing crop yields, reducing waste, and promoting sustainable farming practices. It can also help farmers make informed decisions by leveraging real-time data, leading to more efficient and effective farm management. This paper delves into the potential future applications of AGI in agriculture, such as agriculture image processing, natural language processing (NLP), robotics, knowledge graphs, and infrastructure, and their impact on precision livestock and precision crops. By leveraging the power of AGI, these emerging technologies can provide farmers with actionable insights, allowing for optimized decision-making and increased productivity. The transformative potential of AGI in agriculture is vast, and this paper aims to highlight its potential to revolutionize the industry.},
  keywords   = {3D reconstruction, AGI, Deep convolutional neural network, deep learning, High-throughput phenotyping, object detection, phenotyping robot, robotics},
  pubstate   = {published},
  tppubtype  = {article}
}
2022
Xu, Rui; Li, Changying
A review of field-based high-throughput phenotyping systems: focusing on ground robots Journal Article
In: Plant Phenomics, vol. 2022, no. Article ID 9760269, pp. 20, 2022.
Links | BibTeX | Tags: agricultural robot, High-throughput phenotyping, phenotyping robot, review, robotics
@article{xu2022review,
  title     = {A review of field-based high-throughput phenotyping systems: focusing on ground robots},
  author    = {Xu, Rui and Li, Changying},
  url       = {https://spj.sciencemag.org/journals/plantphenomics/2022/9760269/},
  doi       = {10.34133/2022/9760269},
  year      = {2022},
  date      = {2022-06-18},
  urldate   = {2022-06-18},
  journal   = {Plant Phenomics},
  volume    = {2022},
  number    = {9760269},
  pages     = {20},
  keywords  = {agricultural robot, High-throughput phenotyping, phenotyping robot, review, robotics},
  pubstate  = {published},
  tppubtype = {article}
}
Xu, Rui; Li, Changying
A modular agricultural robotic system (MARS) for precision farming: Concept and implementation Journal Article
In: Journal of Field Robotics, vol. 39, no. 4, pp. 387-409, 2022.
Abstract | Links | BibTeX | Tags: agricultural robot, High-throughput phenotyping, phenotyping robot
@article{xu2022mars,
  title     = {A modular agricultural robotic system ({MARS}) for precision farming: Concept and implementation},
  author    = {Xu, Rui and Li, Changying},
  url       = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.22056},
  doi       = {10.1002/rob.22056},
  year      = {2022},
  date      = {2022-01-01},
  journal   = {Journal of Field Robotics},
  volume    = {39},
  number    = {4},
  pages     = {387--409},
  abstract  = {Increasing global population, climate change, and shortage of labor pose significant challenges for meeting the global food and fiber demand, and agricultural robots offer a promising solution to these challenges. This paper presents a new robotic system architecture and the resulting modular agricultural robotic system (MARS) that is an autonomous, multi-purpose, and affordable robotic platform for in-field plant high throughput phenotyping and precision farming. There are five essential hardware modules (wheel module, connection module, robot controller, robot frame, and power module) and three optional hardware modules (actuation module, sensing module, and smart attachment). Various combinations of the hardware modules can create different robot configurations for specific agricultural tasks. The software was designed using the Robot Operating System (ROS) with three modules: control module, navigation module, and vision module. A robot localization method using dual Global Navigation Satellite System antennas was developed. Two line-following algorithms were implemented as the local planner for the ROS navigation stack. Based on the MARS design concept, two MARS designs were implemented: a low-cost, lightweight robotic system named MARS mini and a heavy-duty robot named MARS X. The autonomous navigation of both MARS X and mini was evaluated at different traveling speeds and payload levels, confirming satisfactory performances. The MARS X was further tested for its performance and navigation accuracy in a crop field, achieving a high accuracy over a 537 m long path with only 15% of the path having an error larger than 0.05 m. The MARS mini and MARS X were shown to be useful for plant phenotyping in two field tests. The modular design makes the robots easily adaptable to different agricultural tasks and the low-cost feature makes it affordable for researchers and growers.},
  keywords  = {agricultural robot, High-throughput phenotyping, phenotyping robot},
  pubstate  = {published},
  tppubtype = {article}
}
2020
Iqbal, Jawad; Xu, Rui; Halloran, Hunter; Li, Changying
Development of a Multi-Purpose Autonomous Differential Drive Mobile Robot for Plant Phenotyping and Soil Sensing Journal Article
In: Electronics, vol. 9, no. 9, pp. 1550, 2020.
Links | BibTeX | Tags: agricultural robot, mobile, phenotyping robot, robotics
@article{iqbal2020maria,
  title     = {Development of a Multi-Purpose Autonomous Differential Drive Mobile Robot for Plant Phenotyping and Soil Sensing},
  author    = {Iqbal, Jawad and Xu, Rui and Halloran, Hunter and Li, Changying},
  url       = {https://www.mdpi.com/2079-9292/9/9/1550},
  year      = {2020},
  date      = {2020-09-15},
  urldate   = {2020-09-15},
  journal   = {Electronics},
  volume    = {9},
  number    = {9},
  pages     = {1550},
  keywords  = {agricultural robot, mobile, phenotyping robot, robotics},
  pubstate  = {published},
  tppubtype = {article}
}
2017
Patrick, A.; Li, C.
High Throughput Phenotyping of Blueberry Bush Morphological Traits Using Unmanned Aerial Systems Journal Article
In: Remote Sensing, 9(12), 1250, 2017.
Abstract | Links | BibTeX | Tags: agricultural robot, High-throughput phenotyping, phenotyping robot, robotics
@article{Patrick2017,
  title     = {High Throughput Phenotyping of Blueberry Bush Morphological Traits Using Unmanned Aerial Systems},
  author    = {Patrick, A. and Li, C.},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/High-Throughput-Phenotyping-of-Blueberry-Bush-Morphological-Traits-Using-Unmanned-Aerial-Systems.pdf},
  doi       = {10.3390/rs9121250},
  year      = {2017},
  date      = {2017-11-30},
  urldate   = {2017-11-30},
  journal   = {Remote Sensing},
  volume    = {9},
  number    = {12},
  pages     = {1250},
  abstract  = {Phenotyping morphological traits of blueberry bushes in the field is important for selecting genotypes that are easily harvested by mechanical harvesters. Morphological data can also be used to assess the effects of crop treatments such as plant growth regulators, fertilizers, and environmental conditions. This paper investigates the feasibility and accuracy of an inexpensive unmanned aerial system in determining the morphological characteristics of blueberry bushes. Color images collected by a quadcopter are processed into three-dimensional point clouds via structure from motion algorithms. Bush height, extents, canopy area, and volume, in addition to crown diameter and width, are derived and referenced to ground truth. In an experimental farm, twenty-five bushes were imaged by a quadcopter. Height and width dimensions achieved a mean absolute error of 9.85 cm before and 5.82 cm after systematic under-estimation correction. Strong correlation was found between manual and image derived bush volumes and their traditional growth indices. Hedgerows of three Southern Highbush varieties were imaged at a commercial farm to extract five morphological features (base angle, blockiness, crown percent height, crown ratio, and vegetation ratio) associated with cultivation and machine harvestability. The bushes were found to be partially separable by multivariate analysis. The methodology developed from this study is not only valuable for plant breeders to screen genotypes with bush morphological traits that are suitable for machine harvest, but can also aid producers in crop management such as pruning and plot layout organization.},
  keywords  = {agricultural robot, High-throughput phenotyping, phenotyping robot, robotics},
  pubstate  = {published},
  tppubtype = {article}
}
Phenotyping morphological traits of blueberry bushes in the field is important for selecting genotypes that are easily harvested by mechanical harvesters. Morphological data can also be used to assess the effects of crop treatments such as plant growth regulators, fertilizers, and environmental conditions. This paper investigates the feasibility and accuracy of an inexpensive unmanned aerial system in determining the morphological characteristics of blueberry bushes. Color images collected by a quadcopter are processed into three-dimensional point clouds via structure from motion algorithms. Bush height, extents, canopy area, and volume, in addition to crown diameter and width, are derived and referenced to ground truth. In an experimental farm, twenty-five bushes were imaged by a quadcopter. Height and width dimensions achieved a mean absolute error of 9.85 cm before and 5.82 cm after systematic under-estimation correction. Strong correlation was found between manual and image derived bush volumes and their traditional growth indices. Hedgerows of three Southern Highbush varieties were imaged at a commercial farm to extract five morphological features (base angle, blockiness, crown percent height, crown ratio, and vegetation ratio) associated with cultivation and machine harvestability. The bushes were found to be partially separable by multivariate analysis. The methodology developed from this study is not only valuable for plant breeders to screen genotypes with bush morphological traits that are suitable for machine harvest, but can also aid producers in crop management such as pruning and plot layout organization.
Patrick, A.; Pelham, S.; Culbreath, A.; Holbrook, C.; Godoy, I. J. d.; Li, C.
High Throughput Phenotyping of Tomato Spot Wilt Disease in Peanuts Using Unmanned Aerial Systems and Multispectral Imaging Journal Article
In: IEEE Instrumentation & Measurement Magazine, 20(3), 4-12, 2017.
Abstract | Links | BibTeX | Tags: agricultural robot, High-throughput phenotyping, phenotyping robot, robotics
@article{Patrick2017b,
  title     = {High Throughput Phenotyping of Tomato Spot Wilt Disease in Peanuts Using Unmanned Aerial Systems and Multispectral Imaging},
  author    = {Patrick, A. and Pelham, S. and Culbreath, A. and Holbrook, C. and de Godoy, I. J. and Li, C.},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/High-Throughput-Phenotyping-of-Tomato-Spot-Wilt-Disease-in-Peanuts-Using-Unmanned-Aerial-Systems-and-Multispectral-Imaging.pdf},
  doi       = {10.1109/MIM.2017.7951684},
  year      = {2017},
  date      = {2017-02-08},
  urldate   = {2017-02-08},
  journal   = {IEEE Instrumentation \& Measurement Magazine},
  volume    = {20},
  number    = {3},
  pages     = {4--12},
  abstract  = {The amount of visible and near infrared light reflected by plants varies depending on their health. In this study, multispectral images were acquired by a quadcopter for high throughput phenotyping of tomato spot wilt disease resistance among twenty genotypes of peanuts. The plants were visually assessed to acquire ground truth ratings of disease incidence. Multispectral images were processed into several vegetation indices. The vegetation index image of each plot has a unique distribution of pixel intensities. The percentage and number of pixels above and below varying thresholds were extracted. These features were correlated with manually acquired data to develop a model for assessing the percentage of each plot diseased. Ultimately, the best vegetation indices and pixel distribution feature for disease detection were determined and correlated with manual ratings and yield. The relative resistance of each genotype was then compared. Image-based disease ratings effectively ranked genotype resistance as early as 93 days from seeding.},
  keywords  = {agricultural robot, High-throughput phenotyping, phenotyping robot, robotics},
  pubstate  = {published},
  tppubtype = {article}
}
The amount of visible and near infrared light reflected by plants varies depending on their health. In this study, multispectral images were acquired by a quadcopter for high throughput phenotyping of tomato spot wilt disease resistance among twenty genotypes of peanuts. The plants were visually assessed to acquire ground truth ratings of disease incidence. Multispectral images were processed into several vegetation indices. The vegetation index image of each plot has a unique distribution of pixel intensities. The percentage and number of pixels above and below varying thresholds were extracted. These features were correlated with manually acquired data to develop a model for assessing the percentage of each plot diseased. Ultimately, the best vegetation indices and pixel distribution feature for disease detection were determined and correlated with manual ratings and yield. The relative resistance of each genotype was then compared. Image-based disease ratings effectively ranked genotype resistance as early as 93 days from seeding.