Robotics (both ground and aerial) and machine learning (e.g., deep learning) are expected to dramatically change how we work and live. Plant science, and agriculture in particular, is among the fields where these two technologies will have the greatest impact. Agricultural robots can assist (or replace) humans working in harsh field conditions and in regions with limited labor. We are developing custom robots and robotic networks with machine learning capabilities for agricultural tasks such as phenotyping, production management (e.g., weeding and pruning), and harvesting. With the advent of the big data era, machine learning techniques will help transform the way we observe and understand plants and crops. For example, our lab has developed a technique that uses images from unmanned aerial systems and convolutional neural networks to count cotton flowers.
Papers
2023
Lu, Guoyu; Li, Sheng; Mai, Gengchen; Sun, Jin; Zhu, Dajiang; Chai, Lilong; Sun, Haijian; Wang, Xianqiao; Dai, Haixing; Liu, Ninghao; Xu, Rui; Petti, Daniel; Li, Changying; Liu, Tianming
AGI for Agriculture Journal Article
In: arXiv preprint arXiv:2304.06136, 2023.
@article{lu2023agi,
title = {AGI for Agriculture},
author = {Guoyu Lu and Sheng Li and Gengchen Mai and Jin Sun and Dajiang Zhu and Lilong Chai and Haijian Sun and Xianqiao Wang and Haixing Dai and Ninghao Liu and Rui Xu and Daniel Petti and Changying Li and Tianming Liu},
url = {https://arxiv.org/abs/2304.06136},
year = {2023},
date = {2023-04-12},
urldate = {2023-01-01},
abstract = {Artificial General Intelligence (AGI) is poised to revolutionize a variety of sectors, including healthcare, finance, transportation, and education. Within healthcare, AGI is being utilized to analyze clinical medical notes, recognize patterns in patient data, and aid in patient management. Agriculture is another critical sector that impacts the lives of individuals worldwide. It serves as a foundation for providing food, fiber, and fuel, yet faces several challenges, such as climate change, soil degradation, water scarcity, and food security. AGI has the potential to tackle these issues by enhancing crop yields, reducing waste, and promoting sustainable farming practices. It can also help farmers make informed decisions by leveraging real-time data, leading to more efficient and effective farm management. This paper delves into the potential future applications of AGI in agriculture, such as agriculture image processing, natural language processing (NLP), robotics, knowledge graphs, and infrastructure, and their impact on precision livestock and precision crops. By leveraging the power of AGI, these emerging technologies can provide farmers with actionable insights, allowing for optimized decision-making and increased productivity. The transformative potential of AGI in agriculture is vast, and this paper aims to highlight its potential to revolutionize the industry. },
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Saeed, Farah; Sun, Shangpeng; Rodriguez-Sanchez, Javier; Snider, John; Liu, Tianming; Li, Changying
Cotton plant part 3D segmentation and architectural trait extraction using point voxel convolutional neural networks Journal Article
In: Plant Methods, vol. 19, no. 1, pp. 33, 2023, ISSN: 1746-4811.
@article{Saeed2023,
title = {Cotton plant part 3D segmentation and architectural trait extraction using point voxel convolutional neural networks},
author = {Farah Saeed and Shangpeng Sun and Javier Rodriguez-Sanchez and John Snider and Tianming Liu and Changying Li},
url = {https://doi.org/10.1186/s13007-023-00996-1},
doi = {10.1186/s13007-023-00996-1},
issn = {1746-4811},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {Plant Methods},
volume = {19},
number = {1},
pages = {33},
abstract = {Plant architecture can influence crop yield and quality. Manual extraction of architectural traits is, however, time-consuming, tedious, and error prone. The trait estimation from 3D data addresses occlusion issues with the availability of depth information while deep learning approaches enable learning features without manual design. The goal of this study was to develop a data processing workflow by leveraging 3D deep learning models and a novel 3D data annotation tool to segment cotton plant parts and derive important architectural traits.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
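The point-voxel approach above combines fine per-point features with features computed on a coarse voxel grid. As a concrete, illustrative sketch (not the authors' pipeline; function and parameter names are placeholders), the voxelization step can be written as:

import numpy as np

def voxelize(points, voxel_size=0.01):
    # Map each 3D point of a plant point cloud (N x 3, meters) to a voxel
    # index; point-voxel networks pair such coarse voxel features with
    # fine per-point features.
    pts = np.asarray(points, dtype=np.float32)
    idx = np.floor((pts - pts.min(axis=0)) / voxel_size).astype(np.int32)
    voxels, point_to_voxel = np.unique(idx, axis=0, return_inverse=True)
    return voxels, point_to_voxel

# usage: voxels, p2v = voxelize(np.random.rand(1000, 3), voxel_size=0.05)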
Herr, Andrew W.; Adak, Alper; Carroll, Matthew E.; Elango, Dinakaran; Kar, Soumyashree; Li, Changying; Jones, Sarah E.; Carter, Arron H.; Murray, Seth C.; Paterson, Andrew; Sankaran, Sindhuja; Singh, Arti; Singh, Asheesh K.
Unoccupied aerial systems imagery for phenotyping in cotton, maize, soybean, and wheat breeding Journal Article
In: Crop Science, vol. 63, no. 4, pp. 1722-1749, 2023.
@article{https://doi.org/10.1002/csc2.21028,
title = {Unoccupied aerial systems imagery for phenotyping in cotton, maize, soybean, and wheat breeding},
author = {Andrew W. Herr and Alper Adak and Matthew E. Carroll and Dinakaran Elango and Soumyashree Kar and Changying Li and Sarah E. Jones and Arron H. Carter and Seth C. Murray and Andrew Paterson and Sindhuja Sankaran and Arti Singh and Asheesh K. Singh},
url = {https://acsess.onlinelibrary.wiley.com/doi/abs/10.1002/csc2.21028},
doi = {10.1002/csc2.21028},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {Crop Science},
volume = {63},
number = {4},
pages = {1722-1749},
abstract = {High-throughput phenotyping (HTP) with unoccupied aerial systems (UAS), consisting of unoccupied aerial vehicles (UAV; or drones) and sensor(s), is an increasingly promising tool for plant breeders and researchers. Enthusiasm and opportunities from this technology for plant breeding are similar to the emergence of genomic tools ∼30 years ago, and genomic selection more recently. Unlike genomic tools, HTP provides a variety of strategies in implementation and utilization that generate big data on the dynamic nature of plant growth formed by temporal interactions between growth and environment. This review lays out strategies deployed across four major staple crop species: cotton (Gossypium hirsutum L.), maize (Zea mays L.), soybean (Glycine max L.), and wheat (Triticum aestivum L.). Each crop highlighted in this review demonstrates how UAS-collected data are employed to automate and improve estimation or prediction of objective phenotypic traits. Each crop section includes four major topics: (a) phenotyping of routine traits, (b) phenotyping of previously infeasible traits, (c) sample cases of UAS application in breeding, and (d) implementation of phenotypic and phenomic prediction and selection. While phenotyping of routine agronomic and productivity traits brings advantages in time and resource optimization, the most potentially beneficial application of UAS data is in collecting traits that were previously difficult or impossible to quantify, improving selection efficiency of important phenotypes. In brief, UAS sensor technology can be used for measuring abiotic stress, biotic stress, crop growth and development, as well as productivity. These applications and the potential implementation of machine learning strategies allow for improved prediction, selection, and efficiency within breeding programs, making UAS HTP a potentially indispensable asset.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2022
Xu, Rui; Li, Changying
A review of field-based high-throughput phenotyping systems: focusing on ground robots Journal Article
In: Plant Phenomics, vol. 2022, Article ID 9760269, 20 pages, 2022.
@article{xu2022review,
title = {A review of field-based high-throughput phenotyping systems: focusing on ground robots},
author = {Rui Xu and Changying Li},
url = {https://spj.sciencemag.org/journals/plantphenomics/2022/9760269/},
doi = {10.34133/2022/9760269},
year = {2022},
date = {2022-06-18},
urldate = {2022-06-18},
journal = {Plant Phenomics},
volume = {2022},
number = {Article ID 9760269},
pages = {20},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Rodriguez-Sanchez, Javier; Li, Changying; Paterson, Andrew
Cotton yield estimation from aerial imagery using machine learning approaches Journal Article
In: Frontiers in Plant Science, vol. 13, 2022.
@article{rodriguez2022yield,
title = {Cotton yield estimation from aerial imagery using machine learning approaches},
author = {Javier Rodriguez-Sanchez and Changying Li and Andrew Paterson},
url = {https://www.frontiersin.org/articles/10.3389/fpls.2022.870181/full},
year = {2022},
date = {2022-04-01},
urldate = {2022-04-01},
journal = {Frontiers in Plant Science},
volume = {13},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Petti, Daniel; Li, Changying
Weakly-supervised learning to automatically count cotton flowers from aerial imagery Journal Article
In: Computers and Electronics in Agriculture, vol. 194, pp. 106734, 2022, ISSN: 0168-1699.
@article{Petti2022,
title = {Weakly-supervised learning to automatically count cotton flowers from aerial imagery},
author = {Daniel Petti and Changying Li},
url = {https://www.sciencedirect.com/science/article/pii/S0168169922000515},
doi = {10.1016/j.compag.2022.106734},
issn = {0168-1699},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Computers and Electronics in Agriculture},
volume = {194},
pages = {106734},
abstract = {Counting plant flowers is a common task with applications for estimating crop yields and selecting favorable genotypes. Typically, this requires a laborious manual process, rendering it impractical to obtain accurate flower counts throughout the growing season. The model proposed in this study uses weak supervision, based on Convolutional Neural Networks (CNNs), which automates such a counting task for cotton flowers using imagery collected from an unmanned aerial vehicle (UAV). Furthermore, the model is trained using Multiple Instance Learning (MIL) in order to reduce the required amount of annotated data. MIL is a binary classification task in which any image with at least one flower falls into the positive class, and all others are negative. In the process, a novel loss function was developed that is designed to improve the performance of image-processing models that use MIL. The model is trained on a large dataset of cotton plant imagery which was collected over several years and will be made publicly available. Additionally, an active-learning-based approach is employed in order to generate the annotations for the dataset while minimizing the required amount of human intervention. Despite having minimal supervision, the model still demonstrates good performance on the testing dataset. Multiple models were tested with different numbers of parameters and input sizes, achieving a minimum average absolute count error of 2.43. Overall, this study demonstrates that a weakly-supervised model is a promising method for solving the flower counting problem while minimizing the human labeling effort.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
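The multiple-instance setup described in the abstract — an image is positive if it contains at least one flower — can be sketched in a few lines of PyTorch. This is an illustrative reduction of the MIL idea (image-level logit = max over a region score map), not the paper's architecture or its novel loss function:

import torch
import torch.nn as nn

class TileScorer(nn.Module):
    # Toy CNN producing a score map; each cell scores one image region.
    def __init__(self):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(3, 16, 3, stride=2, padding=1), nn.ReLU(),
            nn.Conv2d(16, 32, 3, stride=2, padding=1), nn.ReLU(),
            nn.Conv2d(32, 1, 1))
    def forward(self, x):
        return self.features(x)

def mil_loss(score_map, bag_label):
    # A bag (image) is positive if any instance (cell) is positive, so the
    # image-level prediction is the max over the score map.
    bag_logit = score_map.flatten(1).max(dim=1).values
    return nn.functional.binary_cross_entropy_with_logits(bag_logit, bag_label)

model = TileScorer()
images = torch.randn(4, 3, 64, 64)        # toy batch
labels = torch.tensor([1., 0., 1., 1.])   # 1 = at least one flower present
mil_loss(model(images), labels).backward()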
Xu, Rui; Li, Changying
A modular agricultural robotic system (MARS) for precision farming: Concept and implementation Journal Article
In: Journal of Field Robotics, vol. 39, no. 4, pp. 387-409, 2022.
@article{https://doi.org/10.1002/rob.22056,
title = {A modular agricultural robotic system (MARS) for precision farming: Concept and implementation},
author = {Rui Xu and Changying Li},
url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.22056},
doi = {10.1002/rob.22056},
year = {2022},
date = {2022-01-01},
journal = {Journal of Field Robotics},
volume = {39},
number = {4},
pages = {387-409},
abstract = {Increasing global population, climate change, and shortage of labor pose significant challenges for meeting the global food and fiber demand, and agricultural robots offer a promising solution to these challenges. This paper presents a new robotic system architecture and the resulting modular agricultural robotic system (MARS) that is an autonomous, multi-purpose, and affordable robotic platform for in-field plant high throughput phenotyping and precision farming. There are five essential hardware modules (wheel module, connection module, robot controller, robot frame, and power module) and three optional hardware modules (actuation module, sensing module, and smart attachment). Various combinations of the hardware modules can create different robot configurations for specific agricultural tasks. The software was designed using the Robot Operating System (ROS) with three modules: control module, navigation module, and vision module. A robot localization method using dual Global Navigation Satellite System antennas was developed. Two line-following algorithms were implemented as the local planner for the ROS navigation stack. Based on the MARS design concept, two MARS designs were implemented: a low-cost, lightweight robotic system named MARS mini and a heavy-duty robot named MARS X. The autonomous navigation of both MARS X and mini was evaluated at different traveling speeds and payload levels, confirming satisfactory performances. The MARS X was further tested for its performance and navigation accuracy in a crop field, achieving a high accuracy over a 537 m long path with only 15% of the path having an error larger than 0.05 m. The MARS mini and MARS X were shown to be useful for plant phenotyping in two field tests. The modular design makes the robots easily adaptable to different agricultural tasks and the low-cost feature makes it affordable for researchers and growers.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
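One common way to derive heading from the dual-GNSS localization mentioned in the abstract is to take the bearing of the baseline between the two antennas. A hedged sketch under a small-baseline, flat-earth assumption (the paper's exact method may differ):

import math

def heading_from_dual_gnss(lat_rear, lon_rear, lat_front, lon_front):
    # Bearing (degrees clockwise from north) of the rear-to-front antenna
    # baseline; the longitude difference is scaled by cos(latitude).
    dlat = math.radians(lat_front - lat_rear)
    dlon = math.radians(lon_front - lon_rear) * math.cos(math.radians(lat_rear))
    return math.degrees(math.atan2(dlon, dlat)) % 360.0

# usage: heading_from_dual_gnss(33.9000, -83.3800, 33.9001, -83.3800) -> ~0.0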
Tan, Chenjiao; Li, Changying; He, Dongjian; Song, Huaibo
Towards real-time tracking and counting of seedlings with a one-stage detector and optical flow Journal Article
In: Computers and Electronics in Agriculture, vol. 193, pp. 106683, 2022, ISSN: 0168-1699.
@article{TAN2022106683,
title = {Towards real-time tracking and counting of seedlings with a one-stage detector and optical flow},
author = {Chenjiao Tan and Changying Li and Dongjian He and Huaibo Song},
url = {https://www.sciencedirect.com/science/article/pii/S0168169921007006},
doi = {10.1016/j.compag.2021.106683},
issn = {0168-1699},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Computers and Electronics in Agriculture},
volume = {193},
pages = {106683},
abstract = {The population of crop seedlings is important for breeders and growers to evaluate the emergence rate of different cultivars and the necessity of replanting, but manual counting of plant seedlings is time-consuming and tedious. Building upon our prior work, we advanced the cotton seedling tracking method by incorporating a one-stage object detection deep neural network and optical flow to improve tracking speed and counting accuracy. Videos of cotton seedlings were captured using consumer-grade video cameras from the top view. You Only Look Once Version 4 (YOLOv4), a one-stage object detection network, was trained to detect cotton seedlings in each frame and to generate bounding boxes. To associate the same seedlings between adjacent frames, an optical flow-based tracking method was adopted to estimate camera motions. By comparing the positions of bounding boxes predicted by optical flow and detected by the YOLOv4 network in the same frame, the number of cotton seedlings was updated. The trained YOLOv4 model achieved high accuracy under conditions of occlusions, blurry images, complex backgrounds, and extreme illuminations. The F1 score of the final detection model was 0.98 and the average precision was 99.12%. Important tracking metrics were compared to evaluate the tracking performance. The Multiple-Object Tracking Accuracy (MOTA) and ID switch of the proposed tracking method were 72.8% and 0.1%, respectively. Counting results showed that the relative error of all testing videos was 3.13%. Compared with the Kalman filter and particle filter-based methods, our optical flow-based method generated fewer errors on testing videos because of higher accuracy of motion estimation. Compared with our previous work, the RMSE of the optical flow-based method decreased by 0.54 and the counting speed increased from 2.5 to 10.8 frames per second. The counting speed can reach 16.6 frames per second if the input resolution was reduced to 1280 × 720 pixels with an only 0.45% reduction in counting accuracy. The proposed method provides an automatic and near real-time tracking approach for counting of multiple cotton seedlings in video frames with improved speed and accuracy, which will benefit plant breeding and precision crop management.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
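The frame-to-frame association step can be summarized as follows. This is a simplified greedy IoU matcher that assumes a single global camera-motion shift estimated from optical flow; the authors' tracker is more involved, and all names here are illustrative:

import numpy as np

def iou(a, b):
    # Boxes as [x1, y1, x2, y2].
    x1, y1 = max(a[0], b[0]), max(a[1], b[1])
    x2, y2 = min(a[2], b[2]), min(a[3], b[3])
    inter = max(0.0, x2 - x1) * max(0.0, y2 - y1)
    union = (a[2]-a[0])*(a[3]-a[1]) + (b[2]-b[0])*(b[3]-b[1]) - inter
    return inter / union if union > 0 else 0.0

def associate(prev_boxes, detections, flow_shift, iou_thresh=0.3):
    # Shift last-frame boxes by the camera motion (dx, dy) estimated from
    # optical flow, then greedily match them to current detections by IoU.
    # Unmatched detections become newly counted seedlings.
    predicted = [np.asarray(b, float) + np.tile(flow_shift, 2) for b in prev_boxes]
    matches, new_tracks, used = {}, [], set()
    for j, det in enumerate(detections):
        best, best_iou = None, iou_thresh
        for i, pred in enumerate(predicted):
            if i in used:
                continue
            v = iou(pred, det)
            if v > best_iou:
                best, best_iou = i, v
        if best is None:
            new_tracks.append(det)
        else:
            matches[best] = j
            used.add(best)
    return matches, new_tracks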
Adke, Shrinidhi; Li, Changying; Rasheed, Khaled M.; Maier, Frederick W.
Supervised and Weakly Supervised Deep Learning for Segmentation and Counting of Cotton Bolls Using Proximal Imagery Journal Article
In: Sensors, vol. 22, no. 10, 2022, ISSN: 1424-8220.
@article{Adke2022,
title = {Supervised and Weakly Supervised Deep Learning for Segmentation and Counting of Cotton Bolls Using Proximal Imagery},
author = {Shrinidhi Adke and Changying Li and Khaled M. Rasheed and Frederick W. Maier},
url = {https://www.mdpi.com/1424-8220/22/10/3688},
doi = {10.3390/s22103688},
issn = {1424-8220},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Sensors},
volume = {22},
number = {10},
abstract = {The total boll count from a plant is one of the most important phenotypic traits for cotton breeding and is also an important factor for growers to estimate the final yield. With the recent advances in deep learning, many supervised learning approaches have been implemented to perform phenotypic trait measurement from images for various crops, but few studies have been conducted to count cotton bolls from field images. Supervised learning models require a vast number of annotated images for training, which has become a bottleneck for machine learning model development. The goal of this study is to develop both fully supervised and weakly supervised deep learning models to segment and count cotton bolls from proximal imagery. A total of 290 RGB images of cotton plants from both potted (indoor and outdoor) and in-field settings were taken by consumer-grade cameras and the raw images were divided into 4350 image tiles for further model training and testing. Two supervised models (Mask R-CNN and S-Count) and two weakly supervised approaches (WS-Count and CountSeg) were compared in terms of boll count accuracy and annotation costs. The results revealed that the weakly supervised counting approaches performed well with RMSE values of 1.826 and 1.284 for WS-Count and CountSeg, respectively, whereas the fully supervised models achieve RMSE values of 1.181 and 1.175 for S-Count and Mask R-CNN, respectively, when the number of bolls in an image patch is less than 10. In terms of data annotation costs, the weakly supervised approaches were at least 10 times more cost efficient than the supervised approach for boll counting. In the future, the deep learning models developed in this study can be extended to other plant organs, such as main stalks, nodes, and primary and secondary branches. Both the supervised and weakly supervised deep learning models for boll counting with low-cost RGB images can be used by cotton breeders, physiologists, and growers alike to improve crop breeding and yield estimation.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
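For reference, the RMSE used above to compare the counting models is simply the root-mean-square difference between predicted and ground-truth per-image counts; a minimal helper (illustrative only):

import numpy as np

def count_rmse(predicted_counts, true_counts):
    # RMSE between predicted and ground-truth boll counts per image patch.
    p = np.asarray(predicted_counts, dtype=float)
    t = np.asarray(true_counts, dtype=float)
    return float(np.sqrt(np.mean((p - t) ** 2)))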
2021
Xu, Rui; Li, Changying; Bernardes, Sergio
Development and Testing of a UAV-Based Multi-Sensor System for Plant Phenotyping and Precision Agriculture Journal Article
In: Remote Sensing, vol. 13, no. 17, 2021, ISSN: 2072-4292.
@article{Xu2021,
title = {Development and Testing of a UAV-Based Multi-Sensor System for Plant Phenotyping and Precision Agriculture},
author = {Rui Xu and Changying Li and Sergio Bernardes},
url = {https://www.mdpi.com/2072-4292/13/17/3517},
doi = {10.3390/rs13173517},
issn = {2072-4292},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
journal = {Remote Sensing},
volume = {13},
number = {17},
abstract = {Unmanned aerial vehicles have been used widely in plant phenotyping and precision agriculture. Several critical challenges remain, however, such as the lack of cross-platform data acquisition software system, sensor calibration protocols, and data processing methods. This paper developed an unmanned aerial system that integrates three cameras (RGB, multispectral, and thermal) and a LiDAR sensor. Data acquisition software supporting data recording and visualization was implemented to run on the Robot Operating System. The design of the multi-sensor unmanned aerial system was open sourced. A data processing pipeline was proposed to preprocess the raw data and to extract phenotypic traits at the plot level, including morphological traits (canopy height, canopy cover, and canopy volume), canopy vegetation index, and canopy temperature. Protocols for both field and laboratory calibrations were developed for the RGB, multispectral, and thermal cameras. The system was validated using ground data collected in a cotton field. Temperatures derived from thermal images had a mean absolute error of 1.02 °C, and canopy NDVI had a mean relative error of 6.6% compared to ground measurements. The observed error for maximum canopy height was 0.1 m. The results show that the system can be useful for plant breeding and precision crop management.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
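A minimal sketch of the plot-level canopy NDVI computation reported above, assuming reflectance-calibrated near-infrared and red bands and a precomputed canopy mask (all names are placeholders):

import numpy as np

def ndvi(nir, red, eps=1e-6):
    # NDVI = (NIR - Red) / (NIR + Red), per pixel on calibrated reflectance.
    nir = nir.astype(np.float32)
    red = red.astype(np.float32)
    return (nir - red) / (nir + red + eps)

def plot_canopy_ndvi(nir, red, canopy_mask):
    # Mean NDVI over canopy pixels only, excluding soil background.
    return float(ndvi(nir, red)[canopy_mask].mean())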
Ni, Xueping; Li, Changying; Jiang, Huanyu; Takeda, Fumiomi
Three-dimensional photogrammetry with deep learning instance segmentation to extract berry fruit harvestability traits Journal Article
In: ISPRS Journal of Photogrammetry and Remote Sensing, vol. 171, pp. 297-309, 2021, ISSN: 0924-2716.
@article{NI2021297,
title = {Three-dimensional photogrammetry with deep learning instance segmentation to extract berry fruit harvestability traits},
author = {Xueping Ni and Changying Li and Huanyu Jiang and Fumiomi Takeda},
url = {https://www.sciencedirect.com/science/article/pii/S0924271620303178},
doi = {10.1016/j.isprsjprs.2020.11.010},
issn = {0924-2716},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
journal = {ISPRS Journal of Photogrammetry and Remote Sensing},
volume = {171},
pages = {297-309},
abstract = {Fruit cluster characteristics such as compactness, maturity, berry number, and berry size, are important phenotypic traits associated with harvestability and yield of blueberry genotypes and can be used to monitor berry development and improve crop management. The goal of this study was to develop a complete framework of 3D segmentation for individual blueberries as they develop in clusters and to extract blueberry cluster traits. To achieve this goal, an image-capturing system was developed to capture blueberry images to facilitate 3D reconstruction and a 2D-3D projection-based photogrammetric pipeline was proposed to extract berry cluster traits. The reconstruction was performed for four southern highbush blueberry cultivars (‘Emerald’, ‘Farthing’, ‘Meadowlark’ and ‘Star’) with 10 cluster samples for each cultivar based on photogrammetry. A minimum bounding box was created to surround a 3D blueberry cluster to calculate compactness as the ratio of berry volume and minimum bounding box volume. Mask R-CNN was used to segment individual blueberries with the maturity property from 2D images and the instance masks were projected onto 3D point clouds to establish 2D-3D correspondences. The developed trait extraction algorithm was used to segment individual 3D blueberries to obtain berry number, individual berry volume, and berry maturity. Berry maturity was used to calculate cluster maturity as the ratio of the mature berry (blue colored fruit) number and the total berry (blue, reddish, and green colored fruit) number comprising the cluster. The accuracy of determining the fruit number in a cluster is 97.3%. The linear regression for cluster maturity has a R2 of 0.908 with a RMSE of 0.068. The cluster berry volume has a RMSE of 2.92 cm3 compared with the ground truth, indicating that the individual berry volume has an error of less than 0.292 cm3 for clusters with a berry number greater than 10. The statistical analyses of the traits for the four cultivars reveals that, in the middle of April, ‘Emerald’ and ‘Farthing’ were more compact than ‘Meadowlark’ and ‘Star’, and the mature berry volume of ‘Farthing’ was greater than ‘Emerald’ and ‘Meadowlark’, while ‘Star’ had the smallest mature berry size. This study develops an effective method based on 3D photogrammetry and 2D instance segmentation that can determine blueberry cluster traits accurately from a large number of samples and can be used for fruit development monitoring, yield estimation, and harvest time prediction.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
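The two cluster ratios defined in the abstract reduce to a few lines. In this illustrative sketch, an axis-aligned bounding box stands in for the paper's minimum bounding box:

import numpy as np

def cluster_compactness(berry_volumes, cluster_points):
    # Compactness = total berry volume / bounding-box volume of the cluster.
    pts = np.asarray(cluster_points, dtype=float)   # (N, 3) point cloud
    box_volume = float(np.prod(pts.max(axis=0) - pts.min(axis=0)))
    return sum(berry_volumes) / box_volume

def cluster_maturity(berry_colors):
    # Maturity = mature (blue) berries / all berries in the cluster.
    colors = list(berry_colors)
    return colors.count("blue") / len(colors)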
2020
Adke, S.; Von Mogel, K. H.; Jiang, Y.; Li, C.
Instance Segmentation to Estimate Consumption of Corn Ears by Wild Animals for GMO Preference Tests Journal Article
In: Frontiers in Artificial Intelligence, vol. 3, no. 119, 2020.
@article{adke2020instane,
title = {Instance Segmentation to Estimate Consumption of Corn Ears by Wild Animals for GMO Preference Tests },
author = {S. Adke and K.H. Von Mogel and Y. Jiang and C. Li},
url = {https://www.frontiersin.org/articles/10.3389/frai.2020.593622/abstract},
year = {2020},
date = {2020-12-30},
urldate = {2020-12-30},
journal = {Frontiers in Artificial Intelligence},
volume = {3},
number = {119},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Jiang, Y.; Li, C.; Xu, R.; Sun, S.; Robertson, J. S.; Paterson, A. H.
DeepFlower: a deep learning-based approach to characterize flowering patterns of cotton plants in the field Journal Article
In: Plant Methods, vol. 16, no. 156, 2020.
@article{jiang2020deepflower,
title = {DeepFlower: a deep learning-based approach to characterize flowering patterns of cotton plants in the field},
author = {Y. Jiang and C. Li and R. Xu and S. Sun and J. S. Robertson and A.H. Paterson},
url = {https://plantmethods.biomedcentral.com/articles/10.1186/s13007-020-00698-y},
doi = {10.1186/s13007-020-00698-y},
year = {2020},
date = {2020-12-07},
urldate = {2020-12-07},
journal = {Plant Methods},
volume = {16},
number = {156},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Iqbal, Jawad; Xu, Rui; Halloran, Hunter; Li, Changying
Development of a Multi-Purpose Autonomous Differential Drive Mobile Robot for Plant Phenotyping and Soil Sensing Journal Article
In: Electronics, vol. 9, no. 9, pp. 1550, 2020.
@article{iqbal2020maria,
title = {Development of a Multi-Purpose Autonomous Differential Drive Mobile Robot for Plant Phenotyping and Soil Sensing},
author = {Jawad Iqbal and Rui Xu and Hunter Halloran and Changying Li },
url = {https://www.mdpi.com/2079-9292/9/9/1550},
year = {2020},
date = {2020-09-15},
urldate = {2020-09-15},
journal = {Electronics},
volume = {9},
number = {9},
pages = {1550},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Ni, X.; Li, C.; Jiang, H.; Takeda, F.
Deep learning image segmentation and extraction of blueberry fruit traits associated with harvestability and yield Journal Article
In: Horticulture Research, vol. 7, no. 1, pp. 1-14, 2020.
@article{Ni2020,
title = {Deep learning image segmentation and extraction of blueberry fruit traits associated with harvestability and yield},
author = {X. Ni and C. Li and H. Jiang and F. Takeda},
url = {https://www.nature.com/articles/s41438-020-0323-3},
year = {2020},
date = {2020-07-01},
urldate = {2020-07-01},
journal = {Horticulture Research},
volume = {7},
number = {1},
pages = {1-14},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Iqbal, Jawad; Xu, Rui; Sun, Shangpeng; Li, Changying
Simulation of an autonomous mobile robot for LiDAR-based in-field phenotyping and navigation Journal Article
In: Robotics, vol. 9, no. 2, pp. 46, 2020.
@article{Iqbal2020,
title = {Simulation of an autonomous mobile robot for LiDAR-based in-field phenotyping and navigation},
author = {Jawad Iqbal and Rui Xu and Shangpeng Sun and Changying Li},
year = {2020},
date = {2020-06-15},
journal = {Robotics},
volume = {9},
number = {2},
pages = {46},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Jiang, Yu; Li, Changying
Convolutional neural networks for image-based high throughput plant phenotyping: A review Journal Article
In: Plant Phenomics, vol. 2020, Article ID 4152816, 2020.
@article{Yu2020,
title = {Convolutional neural networks for image-based high throughput plant phenotyping: A review},
author = {Yu Jiang and Changying Li},
url = {https://spj.sciencemag.org/journals/plantphenomics/2020/4152816/},
doi = {10.34133/2020/4152816},
year = {2020},
date = {2020-02-20},
urldate = {2020-02-20},
journal = {Plant Phenomics},
volume = {2020},
number = {4152816},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhang, M.; Jiang, Y.; Li, C.; Yang, F.
Fully convolutional networks for blueberry bruising and calyx segmentation using hyperspectral transmittance imaging Journal Article
In: Biosystems Engineering, vol. 192, pp. 159-175, 2020.
@article{Zhang2019b,
title = {Fully convolutional networks for blueberry bruising and calyx segmentation using hyperspectral transmittance imaging },
author = {M. Zhang and Y. Jiang and C. Li and F. Yang},
url = {https://www.sciencedirect.com/science/article/pii/S1537511020300301?dgcid=author},
doi = {10.1016/j.biosystemseng.2020.01.018},
year = {2020},
date = {2020-01-27},
urldate = {2020-01-27},
journal = {Biosystems Engineering},
volume = {192},
pages = {159-175},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2019
Jiang, Y.; Li, C.; Paterson, A.; Robertson, J.
DeepSeedling: Deep convolutional network and Kalman filter for plant seedling detection and counting in the field Journal Article
In: Plant Methods, vol. 15, no. 1, pp. 141, 2019.
@article{Jiang2019,
title = {DeepSeedling: Deep convolutional network and Kalman filter for plant seedling detection and counting in the field },
author = { Y. Jiang and C. Li and A. Paterson and J. Robertson},
url = {https://plantmethods.biomedcentral.com/articles/10.1186/s13007-019-0528-3#citeas},
doi = {10.1186/s13007-019-0528-3},
year = {2019},
date = {2019-11-23},
urldate = {2019-11-23},
journal = {Plant Methods},
volume = {15},
number = {1},
pages = {141},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Xu, R.; Li, C.; Paterson, A. H.
Multispectral imaging and unmanned aerial systems for cotton plant phenotyping Journal Article
In: PLoS ONE, vol. 14, no. 2, pp. e0205083, 2019.
@article{Xu201901,
title = {Multispectral imaging and unmanned aerial systems for cotton plant phenotyping},
author = {R. Xu and C. Li and A.H. Paterson},
url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Multispectral-imaging-and-unmanned-aerial-systems-for-cotton-plant-phenotyping.pdf},
doi = {10.1371/journal.pone.0205083},
year = {2019},
date = {2019-02-27},
urldate = {2019-02-27},
journal = {PLoS ONE},
volume = {14},
number = {2},
pages = {e0205083},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2017
Xu, R.; Li, C.; Paterson, A. H.; Jiang, Y.; Sun, S.; Robertson, J. S.
Aerial Images and Convolutional Neural Network for Cotton Bloom Detection Journal Article
In: Frontiers in Plant Science, vol. 8, pp. 2235, 2017.
@article{Xu2018,
title = {Aerial Images and Convolutional Neural Network for Cotton Bloom Detection},
author = {R. Xu and C. Li and A.H. Paterson and Y. Jiang and S. Sun and J.S. Robertson},
url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Aerial-Images-and-Convolutional-Neural-Network-for-Cotton-Bloom-Detection.pdf},
doi = {10.3389/fpls.2017.02235},
year = {2017},
date = {2017-12-19},
urldate = {2017-12-19},
journal = {Frontiers in Plant Science},
volume = {8},
pages = {2235},
abstract = {Monitoring flower development can provide useful information for production management, estimating yield and selecting specific genotypes of crops. The main goal of this study was to develop a methodology to detect and count cotton flowers, or blooms, using color images acquired by an unmanned aerial system. The aerial images were collected from two test fields in 4 days. A convolutional neural network (CNN) was designed and trained to detect cotton blooms in raw images, and their 3D locations were calculated using the dense point cloud constructed from the aerial images with the structure from motion method. The quality of the dense point cloud was analyzed and plots with poor quality were excluded from data analysis. A constrained clustering algorithm was developed to register the same bloom detected from different images based on the 3D location of the bloom. The accuracy and incompleteness of the dense point cloud were analyzed because they affected the accuracy of the 3D location of the blooms and thus the accuracy of the bloom registration result. The constrained clustering algorithm was validated using simulated data, showing good efficiency and accuracy. The bloom count from the proposed method was comparable with the number counted manually with an error of −4 to 3 blooms for the field with a single plant per plot. However, more plots were underestimated in the field with multiple plants per plot due to hidden blooms that were not captured by the aerial images. The proposed methodology provides a high-throughput method to continuously monitor the flowering progress of cotton.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
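A simplified stand-in for the constrained clustering idea in this abstract: detections are merged into bloom clusters by 3D distance, under the constraint that a cluster may hold at most one detection per image (two detections in the same image must be different blooms). The paper's algorithm is more elaborate; the threshold and names here are illustrative:

import numpy as np

def register_blooms(detections, max_dist=0.05):
    # detections: list of (xyz, image_id) pairs from all aerial images.
    clusters = []  # each cluster: {"centroid", "images", "points"}
    for xyz, image_id in detections:
        xyz = np.asarray(xyz, dtype=float)
        best, best_d = None, max_dist
        for c in clusters:
            if image_id in c["images"]:  # constraint: one detection per image
                continue
            d = np.linalg.norm(c["centroid"] - xyz)
            if d < best_d:
                best, best_d = c, d
        if best is None:
            clusters.append({"centroid": xyz, "images": {image_id}, "points": [xyz]})
        else:
            best["points"].append(xyz)
            best["images"].add(image_id)
            best["centroid"] = np.mean(best["points"], axis=0)
    return len(clusters)  # estimated number of unique blooms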
Patrick, A.; Li, C.
High Throughput Phenotyping of Blueberry Bush Morphological Traits Using Unmanned Aerial Systems Journal Article
In: Remote Sensing, vol. 9, no. 12, pp. 1250, 2017.
@article{Patrick2017,
title = {High Throughput Phenotyping of Blueberry Bush Morphological Traits Using Unmanned Aerial Systems},
author = {A. Patrick and C. Li},
url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/High-Throughput-Phenotyping-of-Blueberry-Bush-Morphological-Traits-Using-Unmanned-Aerial-Systems.pdf},
doi = {10.3390/rs9121250},
year = {2017},
date = {2017-11-30},
urldate = {2017-11-30},
journal = {Remote Sensing},
volume = {9},
number = {12},
pages = {1250},
abstract = {Phenotyping morphological traits of blueberry bushes in the field is important for selecting genotypes that are easily harvested by mechanical harvesters. Morphological data can also be used to assess the effects of crop treatments such as plant growth regulators, fertilizers, and environmental conditions. This paper investigates the feasibility and accuracy of an inexpensive unmanned aerial system in determining the morphological characteristics of blueberry bushes. Color images collected by a quadcopter are processed into three-dimensional point clouds via structure from motion algorithms. Bush height, extents, canopy area, and volume, in addition to crown diameter and width, are derived and referenced to ground truth. In an experimental farm, twenty-five bushes were imaged by a quadcopter. Height and width dimensions achieved a mean absolute error of 9.85 cm before and 5.82 cm after systematic under-estimation correction. Strong correlation was found between manual and image derived bush volumes and their traditional growth indices. Hedgerows of three Southern Highbush varieties were imaged at a commercial farm to extract five morphological features (base angle, blockiness, crown percent height, crown ratio, and vegetation ratio) associated with cultivation and machine harvestability. The bushes were found to be partially separable by multivariate analysis. The methodology developed from this study is not only valuable for plant breeders to screen genotypes with bush morphological traits that are suitable for machine harvest, but can also aid producers in crop management such as pruning and plot layout organization.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
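A minimal sketch of extracting bush height, widths, and a voxel-occupancy volume estimate from a cleaned single-bush point cloud; this is an illustrative simplification of the structure-from-motion processing described above, with the voxel size as a placeholder:

import numpy as np

def bush_morphology(points, voxel=0.05):
    # points: (N, 3) bush point cloud in meters; voxel: grid size in meters.
    pts = np.asarray(points, dtype=float)
    mins, maxs = pts.min(axis=0), pts.max(axis=0)
    height = maxs[2] - mins[2]
    width_x, width_y = maxs[0] - mins[0], maxs[1] - mins[1]
    occupied = np.unique(np.floor((pts - mins) / voxel).astype(int), axis=0)
    volume = occupied.shape[0] * voxel ** 3  # crude canopy volume estimate
    return height, width_x, width_y, volume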
Patrick, A.; Pelham, S.; Culbreath, A.; Holbrook, C.; Godoy, I. J. d.; Li, C.
High Throughput Phenotyping of Tomato Spot Wilt Disease in Peanuts Using Unmanned Aerial Systems and Multispectral Imaging Journal Article
In: IEEE Instrumentation & Measurement Magazine, vol. 20, no. 3, pp. 4-12, 2017.
@article{Patrick2017b,
title = {High Throughput Phenotyping of Tomato Spot Wilt Disease in Peanuts Using Unmanned Aerial Systems and Multispectral Imaging},
author = {A. Patrick and S. Pelham and A. Culbreath and C. Holbrook and I.J.d. Godoy and C. Li},
url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/High-Throughput-Phenotyping-of-Tomato-Spot-Wilt-Disease-in-Peanuts-Using-Unmanned-Aerial-Systems-and-Multispectral-Imaging.pdf},
doi = {10.1109/MIM.2017.7951684},
year = {2017},
date = {2017-02-08},
urldate = {2017-02-08},
journal = {IEEE Instrumentation & Measurement Magazine},
volume = {20},
number = {3},
pages = {4-12},
abstract = {The amount of visible and near infrared light reflected by plants varies depending on their health. In this study, multispectral images were acquired by a quadcopter for high throughput phenotyping of tomato spot wilt disease resistance among twenty genotypes of peanuts. The plants were visually assessed to acquire ground truth ratings of disease incidence. Multispectral images were processed into several vegetation indices. The vegetation index image of each plot has a unique distribution of pixel intensities. The percentage and number of pixels above and below varying thresholds were extracted. These features were correlated with manually acquired data to develop a model for assessing the percentage of each plot diseased. Ultimately, the best vegetation indices and pixel distribution feature for disease detection were determined and correlated with manual ratings and yield. The relative resistance of each genotype was then compared. Image-based disease ratings effectively ranked genotype resistance as early as 93 days from seeding.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
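The per-plot pixel-distribution features described above (fractions of vegetation-index pixels beyond varying thresholds) can be sketched as follows; the threshold values are placeholders:

import numpy as np

def threshold_features(vi_image, thresholds=(0.3, 0.5, 0.7)):
    # Fraction of plot pixels whose vegetation-index value exceeds each
    # threshold; such features were correlated with visual disease ratings.
    v = np.asarray(vi_image, dtype=float).ravel()
    return {t: float((v > t).mean()) for t in thresholds}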
2007
Li, C.; Heinemann, P.
ANN integrated electronic nose system for apple quality evaluation Journal Article
In: Transactions of the ASABE, vol. 50, no. 6, pp. 2285-2294, 2007.
@article{Li2007,
title = {ANN integrated electronic nose system for apple quality evaluation},
author = {C. Li and P. Heinemann},
doi = {10.13031/2013.24081},
year = {2007},
date = {2007-07-12},
urldate = {2007-07-12},
journal = {Transactions of the ASABE},
volume = {50},
number = {6},
pages = {2285-2294},
abstract = {The fresh produce industry generates more than one billion dollars each year in the U.S. market. However, fresh produce departments in grocery stores experience as much as 10% loss because the apples contain undetected defects and deteriorate in quality before they can be sold. Apple defects can create sites for pathogen development, which can cause foodborne illness. It is important to develop a non-destructive system for rapid detection and classification of defective fresh produce. In this study, an artificial neural network (ANN) based electronic nose and zNoseTM system was developed to detect physically damaged apples. Principal component analysis was used for clustering plot and feature extraction. The first five principal components were selected for the electronic nose data input, and the first ten principal components were selected for the zNoseTM spectrum data. Different ANN models, back-propagation networks (BP), probabilistic neural networks (PNN), and learning vector quantification networks (LVQ), were built and compared based on their classification accuracy, sensitivity and specificity, generalization, and incremental learning performance. For the Enose data, the BP and PNN classification rate of 85.3% and 85.1%, respectively, was better than the LVQ classification rate of 73.7%; for the zNoseTM data, the three ANN models had similar performances, which were less favorable than the Enose, with classification rates of 77%, 76.8% and 74.3%. The three ANN models' performances were also measured by their sensitivity, specificity, generalization, and incremental learning.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
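The PCA-then-classify pattern used in this study can be sketched with scikit-learn; an MLP stands in here for the BP/PNN/LVQ models compared in the paper, and the data are synthetic placeholders:

import numpy as np
from sklearn.decomposition import PCA
from sklearn.neural_network import MLPClassifier
from sklearn.pipeline import make_pipeline

rng = np.random.default_rng(0)
X = rng.normal(size=(120, 32))    # toy readings from 32 electronic-nose sensors
y = rng.integers(0, 2, size=120)  # toy labels: 0 = intact apple, 1 = damaged

# First five principal components as features (as the paper does for the
# Enose data), then a neural-network classifier.
model = make_pipeline(PCA(n_components=5),
                      MLPClassifier(hidden_layer_sizes=(16,), max_iter=1000))
model.fit(X, y)
print(model.score(X, y))  # training accuracy on the toy data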
Li, C.; Heinemann, P.; Sherry, R.
Neural network and Bayesian network fusion models to fuse electronic nose and surface acoustic wave sensor data for apple defect detection Journal Article
In: Sensors and Actuators B: Chemical, vol. 125, no. 1, pp. 301-310, 2007.
@article{Li2007b,
title = {Neural network and Bayesian network fusion models to fuse electronic nose and surface acoustic wave sensor data for apple defect detection},
author = {C. Li and P. Heinemann and R. Sherry},
doi = {10.1016/j.snb.2007.02.027},
year = {2007},
date = {2007-02-26},
urldate = {2007-02-26},
journal = {Sensors and Actuators B: Chemical},
volume = {125},
number = {1},
pages = {301-310},
abstract = {The Cyranose 320 electronic nose (Enose) and zNose™ are two instruments used to detect volatile profiles. In this research, feature level and decision level multisensor data fusion models, combined with covariance matrix adaptation evolutionary strategy (CMAES), were developed to fuse the Enose and zNose data to improve detection and classification performance for damaged apples compared with using the individual instruments alone. Principal component analysis (PCA) was used for feature extraction and probabilistic neural networks (PNN) were developed as the classifier. Three feature-based fusion schemes were compared. Dynamic selective fusion achieved an average 1.8% and a best 0% classification error rate in a total of 30 independent runs. The static selective fusion approach resulted in a 6.1% classification error rate, which was not as good as using individual sensors (4.2% for the Enose and 2.6% for the zNose) if only selected features were applied. Simply adding the Enose and zNose features without selection (non-selective fusion) worsened the classification performance with a 32.5% classification error rate. This indicated that the feature selection using the CMAES is an indispensable process in multisensor data fusion, especially if multiple sources of sensors contain much irrelevant or redundant information. At the decision level, Bayesian network fusion achieved better performance than two individual sensors, with 11% error rate versus 13% error rate for the Enose and 20% error rate for the zNose. It is shown that both the feature level fusion with the CMAES optimization algorithms and decision level fusion using a Bayesian network as a classifier improved system classification performance. This methodology can also be applied to other sensor fusion applications.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
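At its core, the feature-level fusion described above stacks the principal components from both instruments and keeps only the subset chosen by the optimizer (CMA-ES in the paper). A hedged sketch, with the selection mask assumed to come from elsewhere:

import numpy as np

def fuse_selected_features(enose_pcs, znose_pcs, selection_mask):
    # Stack per-sample PCA features from both instruments (feature-level
    # fusion), then keep only the optimizer-selected columns.
    stacked = np.hstack([enose_pcs, znose_pcs])
    return stacked[:, np.asarray(selection_mask, dtype=bool)]

# usage: fused = fuse_selected_features(E, Z, mask); a PNN-style classifier
# would then be trained on `fused`, as in the paper.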
2006
Li, C.; Heinemann, P.; Reed, P.
Using genetic algorithms (GAs) and CMA evolutionary strategy to optimize electronic nose sensor selection Journal Article
In: Transactions of the ASABE, vol. 51, no. 1, pp. 321-330, 2006.
@article{Li2006,
title = {Using genetic algorithms (GAs) and CMA evolutionary strategy to optimize electronic nose sensor selection},
author = {C. Li and P. Heinemann and P. Reed},
doi = {10.13031/2013.21505},
year = {2006},
date = {2006-07-06},
urldate = {2006-07-06},
journal = {Transactions of the ASABE},
volume = {51},
number = {1},
pages = {321-330},
abstract = {The high dimensionality of electronic nose data increases the difficulty of their use in classification models. Reducing this high dimensionality helps reduce variable numbers, improve classification accuracy, and reduce computation time and sensor cost. In this research, the Cyranose 320 electronic nose, which was used for apple defect detection, was optimized by selecting only the most relevant of its internal 32 sensors using different selection methods. Two robust heuristic optimization algorithms, genetic algorithm (GA) and covariance matrix adaptation evolutionary strategy (CMAES), were applied and compared. Although both algorithms searched the optimal sensors resulting in a best classification error rate of 4.4%, the average classification error rate of CMA over 30 random seed runs was 5.0% (s.d.=0.006) which was better than 5.2% (s.d.=0.004) from the GA. The final optimal solution sets obtained by integer GA showed that including more sensors did not guarantee better classification performance. The best reduction in classification error rate was 10% while the number of sensors was reduced 78%. This study provided a robust and efficient optimization approach to reduce high data dimensionality of the electronic nose data, which substantially improved electronic nose performance in apple defect detection while potentially reducing the overall electronic nose cost for future specific applications.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
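A minimal genetic algorithm over 32-bit sensor-subset masks, in the spirit of the selection procedure above. The fitness function is a toy stand-in for the classification accuracy of a model trained on the selected sensors; the population size, rates, and everything else are illustrative, not the paper's configuration:

import numpy as np

rng = np.random.default_rng(0)

def toy_fitness(mask):
    # Stand-in for classification accuracy: reward (fictional) informative
    # sensors (the first 8) and lightly penalize larger subsets.
    return mask[:8].sum() - 0.05 * mask.sum()

def ga_select(n_sensors=32, pop=40, gens=50, p_mut=0.02):
    population = rng.integers(0, 2, size=(pop, n_sensors))
    for _ in range(gens):
        fitness = np.array([toy_fitness(ind) for ind in population])
        # Binary tournament selection.
        pairs = rng.integers(0, pop, size=(pop, 2))
        winners = np.where(fitness[pairs[:, 0]] >= fitness[pairs[:, 1]],
                           pairs[:, 0], pairs[:, 1])
        parents = population[winners]
        # One-point crossover with a shuffled set of mates.
        mates = parents[rng.permutation(pop)]
        cuts = rng.integers(1, n_sensors, size=pop)
        children = parents.copy()
        for i in range(pop):
            children[i, cuts[i]:] = mates[i, cuts[i]:]
        # Bit-flip mutation.
        flips = rng.random(children.shape) < p_mut
        children ^= flips.astype(children.dtype)
        population = children
    fitness = np.array([toy_fitness(ind) for ind in population])
    return population[fitness.argmax()]  # best sensor mask found

best_mask = ga_select()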