2023
Liu, Yuxuan; Hou, Jixin; Li, Changying; Wang, Xianqiao
Intelligent Soft Robotic Grippers for Agricultural and Food Product Handling: A Brief Review with a Focus on Design and Control Journal Article
In: Advanced Intelligent Systems, vol. n/a, no. n/a, pp. 2300233, 2023.
Abstract | Links | BibTeX | Tags: agricultural robots, end-effectors, grasping, grippers, harvesting robots, pick and place, sensors
@article{liu2023grippers,
title = {Intelligent Soft Robotic Grippers for Agricultural and Food Product Handling: A Brief Review with a Focus on Design and Control},
author = {Yuxuan Liu and Jixin Hou and Changying Li and Xianqiao Wang},
url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/aisy.202300233},
doi = {10.1002/aisy.202300233},
year = {2023},
date = {2023-10-08},
journal = {Advanced Intelligent Systems},
pages = {2300233},
abstract = {Advances in material sciences, control algorithms, and manufacturing techniques have facilitated rapid progress in soft grippers, propelling their adoption in various fields. In this review article, a comprehensive overview of the design and control aspects of intelligent soft robotic grippers tailored specifically for agricultural product handling is provided. Soft grippers have emerged as a promising solution for handling delicate and fragile objects. In this article, the recent progress in various gripper design, including fluidic and mechanical grippers, is elucidated and the role of advanced control approaches in enabling intelligent functions, such as object classification and grasping condition evaluation, is explored. Moreover, the challenges and opportunities pertaining to implementation of soft grippers in the agricultural industry are thoroughly discussed. While most demonstrations of soft grippers and their control strategies remain at the experimental stage, in this article, it is aimed to provide insights into the potential applications of soft grippers in agricultural product handling, thereby inspiring future research in this field.},
keywords = {agricultural robots, end-effectors, grasping, grippers, harvesting robots, pick and place, sensors},
pubstate = {published},
tppubtype = {article}
}
Lu, Guoyu; Li, Sheng; Mai, Gengchen; Sun, Jin; Zhu, Dajiang; Chai, Lilong; Sun, Haijian; Wang, Xianqiao; Dai, Haixing; Liu, Ninghao; Xu, Rui; Petti, Daniel; Li, Changying; Liu, Tianming
AGI for Agriculture Journal Article
In: 2023.
Abstract | Links | BibTeX | Tags: 3D reconstruction, AGI, Deep convolutional neural network, deep learning, High-throughput phenotyping, object detection, phenotyping robot, robotics
@article{lu2023agi,
title = {AGI for Agriculture},
author = {Guoyu Lu and Sheng Li and Gengchen Mai and Jin Sun and Dajiang Zhu and Lilong Chai and Haijian Sun and Xianqiao Wang and Haixing Dai and Ninghao Liu and Rui Xu and Daniel Petti and Changying Li and Tianming Liu},
url = {https://arxiv.org/abs/2304.06136},
eprint = {2304.06136},
eprinttype = {arXiv},
year = {2023},
date = {2023-04-12},
urldate = {2023-01-01},
abstract = {Artificial General Intelligence (AGI) is poised to revolutionize a variety of sectors, including healthcare, finance, transportation, and education. Within healthcare, AGI is being utilized to analyze clinical medical notes, recognize patterns in patient data, and aid in patient management. Agriculture is another critical sector that impacts the lives of individuals worldwide. It serves as a foundation for providing food, fiber, and fuel, yet faces several challenges, such as climate change, soil degradation, water scarcity, and food security. AGI has the potential to tackle these issues by enhancing crop yields, reducing waste, and promoting sustainable farming practices. It can also help farmers make informed decisions by leveraging real-time data, leading to more efficient and effective farm management. This paper delves into the potential future applications of AGI in agriculture, such as agriculture image processing, natural language processing (NLP), robotics, knowledge graphs, and infrastructure, and their impact on precision livestock and precision crops. By leveraging the power of AGI, these emerging technologies can provide farmers with actionable insights, allowing for optimized decision-making and increased productivity. The transformative potential of AGI in agriculture is vast, and this paper aims to highlight its potential to revolutionize the industry.},
keywords = {3D reconstruction, AGI, Deep convolutional neural network, deep learning, High-throughput phenotyping, object detection, phenotyping robot, robotics},
pubstate = {published},
tppubtype = {article}
}
Saeed, Farah; Sun, Shangpeng; Rodriguez-Sanchez, Javier; Snider, John; Liu, Tianming; Li, Changying
Cotton plant part 3D segmentation and architectural trait extraction using point voxel convolutional neural networks Journal Article
In: Plant Methods, vol. 19, no. 1, pp. 33, 2023, ISSN: 1746-4811.
Abstract | Links | BibTeX | Tags: deep learning, High-throughput phenotyping, LiDAR, machine learning
@article{Saeed2023,
  title     = {Cotton plant part 3D segmentation and architectural trait extraction using point voxel convolutional neural networks},
  author    = {Farah Saeed and Shangpeng Sun and Javier Rodriguez-Sanchez and John Snider and Tianming Liu and Changying Li},
  url       = {https://doi.org/10.1186/s13007-023-00996-1},
  doi       = {10.1186/s13007-023-00996-1},
  issn      = {1746-4811},
  year      = {2023},
  date      = {2023-01-01},
  urldate   = {2023-01-01},
  journal   = {Plant Methods},
  volume    = {19},
  number    = {1},
  pages     = {33},
  abstract  = {Plant architecture can influence crop yield and quality. Manual extraction of architectural traits is, however, time-consuming, tedious, and error prone. The trait estimation from 3D data addresses occlusion issues with the availability of depth information while deep learning approaches enable learning features without manual design. The goal of this study was to develop a data processing workflow by leveraging 3D deep learning models and a novel 3D data annotation tool to segment cotton plant parts and derive important architectural traits.},
  keywords  = {deep learning, High-throughput phenotyping, LiDAR, machine learning},
  pubstate  = {published},
  tppubtype = {article}
}
Herr, Andrew W.; Adak, Alper; Carroll, Matthew E.; Elango, Dinakaran; Kar, Soumyashree; Li, Changying; Jones, Sarah E.; Carter, Arron H.; Murray, Seth C.; Paterson, Andrew; Sankaran, Sindhuja; Singh, Arti; Singh, Asheesh K.
Unoccupied aerial systems imagery for phenotyping in cotton, maize, soybean, and wheat breeding Journal Article
In: Crop Science, vol. 63, no. 4, pp. 1722-1749, 2023.
Abstract | Links | BibTeX | Tags: agricultural robot, High-throughput phenotyping, review
@article{herr2023uas,
title = {Unoccupied aerial systems imagery for phenotyping in cotton, maize, soybean, and wheat breeding},
author = {Andrew W. Herr and Alper Adak and Matthew E. Carroll and Dinakaran Elango and Soumyashree Kar and Changying Li and Sarah E. Jones and Arron H. Carter and Seth C. Murray and Andrew Paterson and Sindhuja Sankaran and Arti Singh and Asheesh K. Singh},
url = {https://acsess.onlinelibrary.wiley.com/doi/abs/10.1002/csc2.21028},
doi = {10.1002/csc2.21028},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {Crop Science},
volume = {63},
number = {4},
pages = {1722--1749},
abstract = {Abstract High-throughput phenotyping (HTP) with unoccupied aerial systems (UAS), consisting of unoccupied aerial vehicles (UAV; or drones) and sensor(s), is an increasingly promising tool for plant breeders and researchers. Enthusiasm and opportunities from this technology for plant breeding are similar to the emergence of genomic tools ∼30 years ago, and genomic selection more recently. Unlike genomic tools, HTP provides a variety of strategies in implementation and utilization that generate big data on the dynamic nature of plant growth formed by temporal interactions between growth and environment. This review lays out strategies deployed across four major staple crop species: cotton (Gossypium hirsutum L.), maize (Zea mays L.), soybean (Glycine max L.), and wheat (Triticum aestivum L.). Each crop highlighted in this review demonstrates how UAS-collected data are employed to automate and improve estimation or prediction of objective phenotypic traits. Each crop section includes four major topics: (a) phenotyping of routine traits, (b) phenotyping of previously infeasible traits, (c) sample cases of UAS application in breeding, and (d) implementation of phenotypic and phenomic prediction and selection. While phenotyping of routine agronomic and productivity traits brings advantages in time and resource optimization, the most potentially beneficial application of UAS data is in collecting traits that were previously difficult or impossible to quantify, improving selection efficiency of important phenotypes. In brief, UAS sensor technology can be used for measuring abiotic stress, biotic stress, crop growth and development, as well as productivity. These applications and the potential implementation of machine learning strategies allow for improved prediction, selection, and efficiency within breeding programs, making UAS HTP a potentially indispensable asset.},
keywords = {agricultural robot, High-throughput phenotyping, review},
pubstate = {published},
tppubtype = {article}
}
Kaur, Navneet; Snider, John L.; Paterson, Andrew H.; Grey, Timothy L.; Li, Changying; Virk, Gurpreet; Parkash, Ved
Variation in thermotolerance of photosystem II energy trapping, intersystem electron transport, and photosystem I electron acceptor reduction for diverse cotton genotypes Journal Article
In: Plant Physiology and Biochemistry, vol. 201, pp. 107868, 2023, ISSN: 0981-9428.
Abstract | Links | BibTeX | Tags: Photosynthesis, Thermotolerance, Thylakoid reactions
@article{KAUR2023107868,
title = {Variation in thermotolerance of photosystem II energy trapping, intersystem electron transport, and photosystem I electron acceptor reduction for diverse cotton genotypes},
author = {Navneet Kaur and John L. Snider and Andrew H. Paterson and Timothy L. Grey and Changying Li and Gurpreet Virk and Ved Parkash},
url = {https://www.sciencedirect.com/science/article/pii/S0981942823003790},
doi = {10.1016/j.plaphy.2023.107868},
issn = {0981-9428},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {Plant Physiology and Biochemistry},
volume = {201},
pages = {107868},
abstract = {Cotton breeding programs have focused on agronomically-desirable traits. Without targeted selection for tolerance to high temperature extremes, cotton will likely be more vulnerable to environment-induced yield loss. Recently-developed methods that couple chlorophyll fluorescence induction measurements with temperature response experiments could be used to identify genotypic variation in photosynthetic thermotolerance of specific photosynthetic processes for field-grown plants. It was hypothesized that diverse cotton genotypes would differ significantly in photosynthetic thermotolerance, specific thylakoid processes would exhibit differential sensitivities to high temperature, and that the most heat tolerant process would exhibit substantial genotypic variation in thermotolerance plasticity. A two-year field experiment was conducted at Tifton and Athens, Georgia, USA. Experiments included 10 genotypes in 2020 and 11 in 2021. Photosynthetic thermotolerance for field-collected leaf samples was assessed by determining the high temperature threshold resulting in a 15% decline in photosynthetic efficiency (T15) for energy trapping by photosystem II (ΦPo), intersystem electron transport (ΦEo), and photosystem I end electron acceptor reduction (ΦRo). Significant genotypic variation in photosynthetic thermotolerance was observed, but the response was dependent on location and photosynthetic parameter assessed. ΦEo was substantially more heat sensitive than ΦPo or ΦRo. Significant genotypic variation in thermotolerance plasticity of ΦEo was also observed. Identifying the weakest link in photosynthetic tolerance to high temperature will facilitate future selection efforts by focusing on the most heat-susceptible processes. Given the genotypic differences in environmental plasticity observed here, future research should evaluate genotypic variation in acclimation potential in controlled environments.},
keywords = {Photosynthesis, Thermotolerance, Thylakoid reactions},
pubstate = {published},
tppubtype = {article}
}
Tan, Chenjiao; Li, Changying; He, Dongjian; Song, Huaibo
Anchor-free deep convolutional neural network for tracking and counting cotton seedlings and flowers Journal Article
In: Computers and Electronics in Agriculture, vol. 215, pp. 108359, 2023, ISSN: 0168-1699.
Abstract | Links | BibTeX | Tags: Anchor free, CNN, Counting, Deep convolutional network, High-throughput phenotyping, object detection, Plant and plant organ, Tracking
@article{Tan2023a,
title = {Anchor-free deep convolutional neural network for tracking and counting cotton seedlings and flowers},
author = {Chenjiao Tan and Changying Li and Dongjian He and Huaibo Song},
url = {https://www.sciencedirect.com/science/article/pii/S0168169923007470},
doi = {10.1016/j.compag.2023.108359},
issn = {0168-1699},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {Computers and Electronics in Agriculture},
volume = {215},
pages = {108359},
abstract = {Accurate counting of plants and their organs in natural environments is essential for breeders and growers. For breeders, counting plants during the seedling stage aids in selecting genotypes with superior emergence rates, while for growers, it informs decisions about potential replanting. Meanwhile, counting specific plant organs, such as flowers, forecasts yields for different genotypes, offering insights into production levels. The overall goal of this study was to investigate a deep convolutional neural network-based tracking method, CenterTrack, for cotton seedling and flower counting from video frames. The network is extended from a customized CenterNet, which is an anchor-free object detector. CenterTrack predicts the detections of the current frame and displacements of detections between the previous frame and the current frame, which are used to associate the same object in consecutive frames. The modified CenterNet detector achieved high accuracy on both seedling and flower datasets with an overall AP50 of 0.962. The video tracking hyperparameters were optimized for each dataset using orthogonal tests. Experimental results showed that seedling and flower counts with optimized hyperparameters highly correlated with those of manual counts (R2 = 0.98 andR2 = 0.95) and the mean relative errors of 75 cotton seedling testing videos and 50 flower testing videos were 5.5 % and 10.8 %, respectively. An average counting speed of 20.4 frames per second was achieved with an input resolution of 1920 × 1080 pixels for both seedling and flower videos. The anchor-free deep convolution neural network-based tracking method provides automatic tracking and counting in video frames, which will significantly benefit plant breeding and crop management.},
keywords = {Anchor free, CNN, Counting, Deep convolutional network, High-throughput phenotyping, object detection, Plant and plant organ, Tracking},
pubstate = {published},
tppubtype = {article}
}
2022
Xu, Rui; Li, Changying
A review of field-based high-throughput phenotyping systems: focusing on ground robots Journal Article
In: Plant Phenomics, vol. 2022, no. Article ID 9760269, pp. 20, 2022.
Links | BibTeX | Tags: agricultural robot, High-throughput phenotyping, phenotyping robot, review, robotics
@article{xu2022review,
title = {A review of field-based high-throughput phenotyping systems: focusing on ground robots},
author = {Rui Xu and Changying Li},
url = {https://spj.sciencemag.org/journals/plantphenomics/2022/9760269/},
doi = {10.34133/2022/9760269},
year = {2022},
date = {2022-06-18},
urldate = {2022-06-18},
journal = {Plant Phenomics},
volume = {2022},
number = {9760269},
pages = {20},
keywords = {agricultural robot, High-throughput phenotyping, phenotyping robot, review, robotics},
pubstate = {published},
tppubtype = {article}
}
Rodriguez-Sanchez, Javier; Li, Changying; Paterson, Andrew
Cotton yield estimation from aerial imagery using machine learning approaches Journal Article
In: Frontiers in Plant Science, vol. 13, 2022.
Links | BibTeX | Tags: High-throughput phenotyping, machine learning
@article{rodriguezsanchez2022yield,
title = {Cotton yield estimation from aerial imagery using machine learning approaches},
author = {Javier Rodriguez-Sanchez and Changying Li and Andrew Paterson},
url = {https://www.frontiersin.org/articles/10.3389/fpls.2022.870181/full},
doi = {10.3389/fpls.2022.870181},
year = {2022},
date = {2022-04-01},
urldate = {2022-04-01},
journal = {Frontiers in Plant Science},
volume = {13},
pages = {870181},
keywords = {High-throughput phenotyping, machine learning},
pubstate = {published},
tppubtype = {article}
}
Jiang, Bo; Song, Huaibo; Wang, Han; Li, Changying
Dairy cow lameness detection using a back curvature feature Journal Article
In: Computers and Electronics in Agriculture, vol. 194, no. 106729, 2022.
@article{jiang2022lameness,
title = {Dairy cow lameness detection using a back curvature feature},
author = {Bo Jiang and Huaibo Song and Han Wang and Changying Li},
url = {https://www.sciencedirect.com/science/article/pii/S0168169922000461},
doi = {10.1016/j.compag.2022.106729},
year = {2022},
date = {2022-02-01},
urldate = {2022-02-01},
journal = {Computers and Electronics in Agriculture},
volume = {194},
number = {106729},
pubstate = {published},
tppubtype = {article}
}
Petti, Daniel; Li, Changying
Weakly-supervised learning to automatically count cotton flowers from aerial imagery Journal Article
In: Computers and Electronics in Agriculture, vol. 194, pp. 106734, 2022, ISSN: 0168-1699.
Abstract | Links | BibTeX | Tags: Active learning, deep learning, High-throughput phenotyping, machine learning, Multiple-instance learning, Object counting
@article{Petti2022,
title = {Weakly-supervised learning to automatically count cotton flowers from aerial imagery},
author = {Daniel Petti and Changying Li},
url = {https://www.sciencedirect.com/science/article/pii/S0168169922000515},
doi = {10.1016/j.compag.2022.106734},
issn = {0168-1699},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Computers and Electronics in Agriculture},
volume = {194},
pages = {106734},
abstract = {Counting plant flowers is a common task with applications for estimating crop yields and selecting favorable genotypes. Typically, this requires a laborious manual process, rendering it impractical to obtain accurate flower counts throughout the growing season. The model proposed in this study uses weak supervision, based on Convolutional Neural Networks (CNNs), which automates such a counting task for cotton flowers using imagery collected from an unmanned aerial vehicle (UAV). Furthermore, the model is trained using Multiple Instance Learning (MIL) in order to reduce the required amount of annotated data. MIL is a binary classification task in which any image with at least one flower falls into the positive class, and all others are negative. In the process, a novel loss function was developed that is designed to improve the performance of image-processing models that use MIL. The model is trained on a large dataset of cotton plant imagery which was collected over several years and will be made publicly available. Additionally, an active-learning-based approach is employed in order to generate the annotations for the dataset while minimizing the required amount of human intervention. Despite having minimal supervision, the model still demonstrates good performance on the testing dataset. Multiple models were tested with different numbers of parameters and input sizes, achieving a minimum average absolute count error of 2.43. Overall, this study demonstrates that a weakly-supervised model is a promising method for solving the flower counting problem while minimizing the human labeling effort.},
keywords = {Active learning, deep learning, High-throughput phenotyping, machine learning, Multiple-instance learning, Object counting},
pubstate = {published},
tppubtype = {article}
}
Xu, Rui; Li, Changying
A modular agricultural robotic system (MARS) for precision farming: Concept and implementation Journal Article
In: Journal of Field Robotics, vol. 39, no. 4, pp. 387-409, 2022.
Abstract | Links | BibTeX | Tags: agricultural robot, High-throughput phenotyping, phenotyping robot
@article{xu2022mars,
title = {A modular agricultural robotic system (MARS) for precision farming: Concept and implementation},
author = {Rui Xu and Changying Li},
url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.22056},
doi = {10.1002/rob.22056},
year = {2022},
date = {2022-01-01},
journal = {Journal of Field Robotics},
volume = {39},
number = {4},
pages = {387--409},
abstract = {Abstract Increasing global population, climate change, and shortage of labor pose significant challenges for meeting the global food and fiber demand, and agricultural robots offer a promising solution to these challenges. This paper presents a new robotic system architecture and the resulting modular agricultural robotic system (MARS) that is an autonomous, multi-purpose, and affordable robotic platform for in-field plant high throughput phenotyping and precision farming. There are five essential hardware modules (wheel module, connection module, robot controller, robot frame, and power module) and three optional hardware modules (actuation module, sensing module, and smart attachment). Various combinations of the hardware modules can create different robot configurations for specific agricultural tasks. The software was designed using the Robot Operating System (ROS) with three modules: control module, navigation module, and vision module. A robot localization method using dual Global Navigation Satellite System antennas was developed. Two line-following algorithms were implemented as the local planner for the ROS navigation stack. Based on the MARS design concept, two MARS designs were implemented: a low-cost, lightweight robotic system named MARS mini and a heavy-duty robot named MARS X. The autonomous navigation of both MARS X and mini was evaluated at different traveling speeds and payload levels, confirming satisfactory performances. The MARS X was further tested for its performance and navigation accuracy in a crop field, achieving a high accuracy over a 537 m long path with only 15% of the path having an error larger than 0.05 m. The MARS mini and MARS X were shown to be useful for plant phenotyping in two field tests. The modular design makes the robots easily adaptable to different agricultural tasks and the low-cost feature makes it affordable for researchers and growers.},
keywords = {agricultural robot, High-throughput phenotyping, phenotyping robot},
pubstate = {published},
tppubtype = {article}
}
Tan, Chenjiao; Li, Changying; He, Dongjian; Song, Huaibo
Towards real-time tracking and counting of seedlings with a one-stage detector and optical flow Journal Article
In: Computers and Electronics in Agriculture, vol. 193, pp. 106683, 2022, ISSN: 0168-1699.
Abstract | Links | BibTeX | Tags: Cotton seedling, Counting, Deep convolutional neural network, deep learning, machine learning, object detection, Optical flow
@article{TAN2022106683,
title = {Towards real-time tracking and counting of seedlings with a one-stage detector and optical flow},
author = {Chenjiao Tan and Changying Li and Dongjian He and Huaibo Song},
url = {https://www.sciencedirect.com/science/article/pii/S0168169921007006},
doi = {10.1016/j.compag.2021.106683},
issn = {0168-1699},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Computers and Electronics in Agriculture},
volume = {193},
pages = {106683},
abstract = {The population of crop seedlings is important for breeders and growers to evaluate the emergence rate of different cultivars and the necessity of replanting, but manual counting of plant seedlings is time-consuming and tedious. Building upon our prior work, we advanced the cotton seedling tracking method by incorporating a one-stage object detection deep neural network and optical flow to improve tracking speed and counting accuracy. Videos of cotton seedlings were captured using consumer-grade video cameras from the top view. You Only Look Once Version 4 (YOLOv4), a one-stage object detection network, was trained to detect cotton seedlings in each frame and to generate bounding boxes. To associate the same seedlings between adjacent frames, an optical flow-based tracking method was adopted to estimate camera motions. By comparing the positions of bounding boxes predicted by optical flow and detected by the YOLOv4 network in the same frame, the number of cotton seedlings was updated. The trained YOLOv4 model achieved high accuracy under conditions of occlusions, blurry images, complex backgrounds, and extreme illuminations. The F1 score of the final detection model was 0.98 and the average precision was 99.12%. Important tracking metrics were compared to evaluate the tracking performance. The Multiple-Object Tracking Accuracy (MOTA) and ID switch of the proposed tracking method were 72.8% and 0.1%, respectively. Counting results showed that the relative error of all testing videos was 3.13%. Compared with the Kalman filter and particle filter-based methods, our optical flow-based method generated fewer errors on testing videos because of higher accuracy of motion estimation. Compared with our previous work, the RMSE of the optical flow-based method decreased by 0.54 and the counting speed increased from 2.5 to 10.8 frames per second. 
The counting speed can reach 16.6 frames per second if the input resolution was reduced to 1280 × 720 pixels with an only 0.45% reduction in counting accuracy. The proposed method provides an automatic and near real-time tracking approach for counting of multiple cotton seedlings in video frames with improved speed and accuracy, which will benefit plant breeding and precision crop management.},
keywords = {Cotton seedling, Counting, Deep convolutional neural network, deep learning, machine learning, object detection, Optical flow},
pubstate = {published},
tppubtype = {article}
}
Adke, Shrinidhi; Li, Changying; Rasheed, Khaled M.; Maier, Frederick W.
Supervised and Weakly Supervised Deep Learning for Segmentation and Counting of Cotton Bolls Using Proximal Imagery Journal Article
In: Sensors, vol. 22, no. 10, 2022, ISSN: 1424-8220.
Abstract | Links | BibTeX | Tags: deep learning, machine learning
@article{Adke2022,
title = {Supervised and Weakly Supervised Deep Learning for Segmentation and Counting of Cotton Bolls Using Proximal Imagery},
author = {Shrinidhi Adke and Changying Li and Khaled M. Rasheed and Frederick W. Maier},
url = {https://www.mdpi.com/1424-8220/22/10/3688},
doi = {10.3390/s22103688},
issn = {1424-8220},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Sensors},
volume = {22},
number = {10},
pages = {3688},
abstract = {The total boll count from a plant is one of the most important phenotypic traits for cotton breeding and is also an important factor for growers to estimate the final yield. With the recent advances in deep learning, many supervised learning approaches have been implemented to perform phenotypic trait measurement from images for various crops, but few studies have been conducted to count cotton bolls from field images. Supervised learning models require a vast number of annotated images for training, which has become a bottleneck for machine learning model development. The goal of this study is to develop both fully supervised and weakly supervised deep learning models to segment and count cotton bolls from proximal imagery. A total of 290 RGB images of cotton plants from both potted (indoor and outdoor) and in-field settings were taken by consumer-grade cameras and the raw images were divided into 4350 image tiles for further model training and testing. Two supervised models (Mask R-CNN and S-Count) and two weakly supervised approaches (WS-Count and CountSeg) were compared in terms of boll count accuracy and annotation costs. The results revealed that the weakly supervised counting approaches performed well with RMSE values of 1.826 and 1.284 for WS-Count and CountSeg, respectively, whereas the fully supervised models achieve RMSE values of 1.181 and 1.175 for S-Count and Mask R-CNN, respectively, when the number of bolls in an image patch is less than 10. In terms of data annotation costs, the weakly supervised approaches were at least 10 times more cost efficient than the supervised approach for boll counting. In the future, the deep learning models developed in this study can be extended to other plant organs, such as main stalks, nodes, and primary and secondary branches. 
Both the supervised and weakly supervised deep learning models for boll counting with low-cost RGB images can be used by cotton breeders, physiologists, and growers alike to improve crop breeding and yield estimation.},
keywords = {deep learning, machine learning},
pubstate = {published},
tppubtype = {article}
}
2021
Xu, Rui; Li, Changying; Bernardes, Sergio
Development and Testing of a UAV-Based Multi-Sensor System for Plant Phenotyping and Precision Agriculture Journal Article
In: Remote Sensing, vol. 13, no. 17, 2021, ISSN: 2072-4292.
Abstract | Links | BibTeX | Tags: agricultural robot, High-throughput phenotyping, robotics
@article{Xu2021,
  title     = {Development and Testing of a UAV-Based Multi-Sensor System for Plant Phenotyping and Precision Agriculture},
  author    = {Rui Xu and Changying Li and Sergio Bernardes},
  url       = {https://www.mdpi.com/2072-4292/13/17/3517},
  doi       = {10.3390/rs13173517},
  issn      = {2072-4292},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  journal   = {Remote Sensing},
  volume    = {13},
  number    = {17},
  abstract  = {Unmanned aerial vehicles have been used widely in plant phenotyping and precision agriculture. Several critical challenges remain, however, such as the lack of cross-platform data acquisition software system, sensor calibration protocols, and data processing methods. This paper developed an unmanned aerial system that integrates three cameras (RGB, multispectral, and thermal) and a LiDAR sensor. Data acquisition software supporting data recording and visualization was implemented to run on the Robot Operating System. The design of the multi-sensor unmanned aerial system was open sourced. A data processing pipeline was proposed to preprocess the raw data and to extract phenotypic traits at the plot level, including morphological traits (canopy height, canopy cover, and canopy volume), canopy vegetation index, and canopy temperature. Protocols for both field and laboratory calibrations were developed for the RGB, multispectral, and thermal cameras. The system was validated using ground data collected in a cotton field. Temperatures derived from thermal images had a mean absolute error of 1.02 °C, and canopy NDVI had a mean relative error of 6.6% compared to ground measurements. The observed error for maximum canopy height was 0.1 m. The results show that the system can be useful for plant breeding and precision crop management.},
  keywords  = {agricultural robot, High-throughput phenotyping, robotics},
  pubstate  = {published},
  tppubtype = {article}
}
Sun, Shangpeng; Li, Changying; Chee, Peng W.; Paterson, Andrew H.; Meng, Cheng; Zhang, Jingyi; Ma, Ping; Robertson, Jon S.; Adhikari, Jeevan
High resolution 3D terrestrial LiDAR for cotton plant main stalk and node detection Journal Article
In: Computers and Electronics in Agriculture, vol. 187, pp. 106276, 2021, ISSN: 0168-1699.
Abstract | Links | BibTeX | Tags: High-throughput phenotyping, LiDAR
@article{SUN2021106276,
  title     = {High resolution {3D} terrestrial {LiDAR} for cotton plant main stalk and node detection},
  author    = {Shangpeng Sun and Changying Li and Peng W. Chee and Andrew H. Paterson and Cheng Meng and Jingyi Zhang and Ping Ma and Jon S. Robertson and Jeevan Adhikari},
  url       = {https://www.sciencedirect.com/science/article/pii/S0168169921002933},
  doi       = {10.1016/j.compag.2021.106276},
  issn      = {0168-1699},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  journal   = {Computers and Electronics in Agriculture},
  volume    = {187},
  pages     = {106276},
  abstract  = {Dense three-dimensional point clouds provide opportunities to retrieve detailed characteristics of plant organ-level phenotypic traits, which are helpful to better understand plant architecture leading to its improvements via new plant breeding approaches. In this study, a high-resolution terrestrial LiDAR was used to acquire point clouds of plants under field conditions, and a data processing pipeline was developed to detect plant main stalks and nodes, and then to extract two phenotypic traits including node number and main stalk length. The proposed method mainly consisted of three steps: first, extract skeletons from original point clouds using a Laplacian-based contraction algorithm; second, identify the main stalk by converting a plant skeleton point cloud to a graph; and third, detect nodes by finding the intersection between the main stalk and branches. Main stalk length was calculated by accumulating the distance between two adjacent points from the lowest to the highest point of the main stalk. Experimental results based on 26 plants showed that the proposed method could accurately measure plant main stalk length and detect nodes; the average R2 and mean absolute percentage error were 0.94 and 4.3% for the main stalk length measurements and 0.7 and 5.1% for node counting, respectively, for point numbers between 80,000 and 150,000 for each plant. Three-dimensional point cloud-based high throughput phenotyping may expedite breeding technologies to improve crop production.},
  keywords  = {High-throughput phenotyping, LiDAR},
  pubstate  = {published},
  tppubtype = {article}
}
Virk, Simerjeet; Porter, Wesley; Snider, John; Rains, Glen; Li, Changying; Liu, Yangxuan
Cotton Emergence and Yield Response to Planter Depth and Downforce Settings in Different Soil Moisture Conditions Journal Article
In: AgriEngineering, vol. 3, no. 2, pp. 323–338, 2021, ISSN: 2624-7402.
Abstract | Links | BibTeX | Tags:
@article{Virk2021,
  title     = {Cotton Emergence and Yield Response to Planter Depth and Downforce Settings in Different Soil Moisture Conditions},
  author    = {Simerjeet Virk and Wesley Porter and John Snider and Glen Rains and Changying Li and Yangxuan Liu},
  url       = {https://www.mdpi.com/2624-7402/3/2/22},
  doi       = {10.3390/agriengineering3020022},
  issn      = {2624-7402},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {AgriEngineering},
  volume    = {3},
  number    = {2},
  pages     = {323--338},
  abstract  = {US cotton producers are motivated to optimize planter performance to ensure timely and uniform stand establishment early in the season, especially when planting in sub-optimal field conditions. Field studies were conducted in 2017, 2018 and 2019 to evaluate the effect of seeding depth and planter downforce on crop emergence and yield in cotton planted in different soil moisture conditions. Field conditions representative of dry, normal and wet soil moisture conditions were attained by applying 0, 1.27 and 2.54 cm of irrigation within the same field. Two cotton cultivars (representing a small-seeded and a large-seeded cultivar, 9259–10,582 and 11,244–14,330 seeds kg−1, respectively), were planted at seeding depths of 1.3, 2.5 and 3.8 cm with each seeding depth paired with three different planter downforces of 0, 445 and 890 N in each block. Cotton was planted in plots that measured 3.66 m (four-rows) wide by 10.67 m long. Results indicated that crop emergence was affected by the seeding depth across most field conditions and higher crop emergence was observed in the large-seeded cultivar at 1.3 and 3.8 cm seeding depths in dry and wet field conditions, respectively. Lint yield was also higher for the large-seeded cultivar at the 3.8 cm seeding depth across all field conditions in 2017, and in dry field conditions in 2018. Planter downforce effect on crop emergence varied among the cultivars where the large-seeded cultivar exhibited higher crop emergence than the small-seeded cultivar at 445 and 890 N downforce. Planter downforce of 445 N yielded greater than the 0 and 890 N treatment in dry field conditions in 2017. The study results suggest that matching planter depth and downforce settings for prevalent soil moisture conditions at planting along with appropriate cultivar selection can help in achieving optimal emergence and yield in cotton.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Adke, Shrinidhi; von Mogel, Karl Haro; Jiang, Yu; Li, Changying
Instance Segmentation to Estimate Consumption of Corn Ears by Wild Animals for GMO Preference Tests Journal Article
In: Frontiers in Artificial Intelligence, vol. 3, 2021, ISSN: 2624-8212.
Abstract | Links | BibTeX | Tags:
@article{Adke2021,
  title     = {Instance Segmentation to Estimate Consumption of Corn Ears by Wild Animals for {GMO} Preference Tests},
  author    = {Shrinidhi Adke and Karl Haro von Mogel and Yu Jiang and Changying Li},
  url       = {https://www.frontiersin.org/article/10.3389/frai.2020.593622},
  doi       = {10.3389/frai.2020.593622},
  issn      = {2624-8212},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {Frontiers in Artificial Intelligence},
  volume    = {3},
  abstract  = {The Genetically Modified (GMO) Corn Experiment was performed to test the hypothesis that wild animals prefer Non-GMO corn and avoid eating GMO corn, which resulted in the collection of complex image data of consumed corn ears. This study develops a deep learning-based image processing pipeline that aims to estimate the consumption of corn by identifying corn and its bare cob from these images, which will aid in testing the hypothesis in the GMO Corn Experiment. Ablation uses mask regional convolutional neural network (Mask R-CNN) for instance segmentation. Based on image data annotation, two approaches for segmentation were discussed: identifying whole corn ears and bare cob parts with and without corn kernels. The Mask R-CNN model was trained for both approaches and segmentation results were compared. Out of the two, the latter approach, i.e., without the kernel, was chosen to estimate the corn consumption because of its superior segmentation performance and estimation accuracy. Ablation experiments were performed with the latter approach to obtain the best model with the available data. The estimation results of these models were included and compared with manually labeled test data with R^{2} = 0.99 which showed that use of the Mask R-CNN model to estimate corn consumption provides highly accurate results, thus, allowing it to be used further on all collected data and help test the hypothesis of the GMO Corn Experiment. These approaches may also be applied to other plant phenotyping tasks (e.g., yield estimation and plant stress quantification) that require instance segmentation.},
  internal-note = {NOTE(review): same title/journal as entry adke2020instane (online 2020, assigned to volume 3 in 2021) -- likely duplicate; confirm and keep one key},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Ni, Xueping; Li, Changying; Jiang, Huanyu; Takeda, Fumiomi
Three-dimensional photogrammetry with deep learning instance segmentation to extract berry fruit harvestability traits Journal Article
In: ISPRS Journal of Photogrammetry and Remote Sensing, vol. 171, pp. 297-309, 2021, ISSN: 0924-2716.
Abstract | Links | BibTeX | Tags: 2D-3D projection, 3D reconstruction, Blueberry traits, deep learning, machine learning, mask R-CNN
@article{NI2021297,
  title     = {Three-dimensional photogrammetry with deep learning instance segmentation to extract berry fruit harvestability traits},
  author    = {Xueping Ni and Changying Li and Huanyu Jiang and Fumiomi Takeda},
  url       = {https://www.sciencedirect.com/science/article/pii/S0924271620303178},
  doi       = {10.1016/j.isprsjprs.2020.11.010},
  issn      = {0924-2716},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  journal   = {ISPRS Journal of Photogrammetry and Remote Sensing},
  volume    = {171},
  pages     = {297--309},
  abstract  = {Fruit cluster characteristics such as compactness, maturity, berry number, and berry size, are important phenotypic traits associated with harvestability and yield of blueberry genotypes and can be used to monitor berry development and improve crop management. The goal of this study was to develop a complete framework of 3D segmentation for individual blueberries as they develop in clusters and to extract blueberry cluster traits. To achieve this goal, an image-capturing system was developed to capture blueberry images to facilitate 3D reconstruction and a 2D-3D projection-based photogrammetric pipeline was proposed to extract berry cluster traits. The reconstruction was performed for four southern highbush blueberry cultivars (‘Emerald’, ‘Farthing’, ‘Meadowlark’ and ‘Star’) with 10 cluster samples for each cultivar based on photogrammetry. A minimum bounding box was created to surround a 3D blueberry cluster to calculate compactness as the ratio of berry volume and minimum bounding box volume. Mask R-CNN was used to segment individual blueberries with the maturity property from 2D images and the instance masks were projected onto 3D point clouds to establish 2D-3D correspondences. The developed trait extraction algorithm was used to segment individual 3D blueberries to obtain berry number, individual berry volume, and berry maturity. Berry maturity was used to calculate cluster maturity as the ratio of the mature berry (blue colored fruit) number and the total berry (blue, reddish, and green colored fruit) number comprising the cluster. The accuracy of determining the fruit number in a cluster is 97.3%. The linear regression for cluster maturity has a R2 of 0.908 with a RMSE of 0.068. The cluster berry volume has a RMSE of 2.92 cm3 compared with the ground truth, indicating that the individual berry volume has an error of less than 0.292 cm3 for clusters with a berry number greater than 10. The statistical analyses of the traits for the four cultivars reveals that, in the middle of April, ‘Emerald’ and ‘Farthing’ were more compact than ‘Meadowlark’ and ‘Star’, and the mature berry volume of ‘Farthing’ was greater than ‘Emerald’ and ‘Meadowlark’, while ‘Star’ had the smallest mature berry size. This study develops an effective method based on 3D photogrammetry and 2D instance segmentation that can determine blueberry cluster traits accurately from a large number of samples and can be used for fruit development monitoring, yield estimation, and harvest time prediction.},
  keywords  = {2D-3D projection, 3D reconstruction, Blueberry traits, deep learning, machine learning, mask R-CNN},
  pubstate  = {published},
  tppubtype = {article}
}
2020
Adke, S.; Mogel, K. H. Von; Jiang, Y.; Li, C.
Instance Segmentation to Estimate Consumption of Corn Ears by Wild Animals for GMO Preference Tests Journal Article
In: Frontiers in Artificial Intelligence, vol. 3, no. 119, 2020.
Links | BibTeX | Tags: deep learning, machine learning, mask R-CNN
@article{adke2020instane,
  title     = {Instance Segmentation to Estimate Consumption of Corn Ears by Wild Animals for {GMO} Preference Tests},
  author    = {Adke, S. and von Mogel, K. H. and Jiang, Y. and Li, C.},
  url       = {https://www.frontiersin.org/articles/10.3389/frai.2020.593622/abstract},
  year      = {2020},
  date      = {2020-12-30},
  urldate   = {2020-12-30},
  journal   = {Frontiers in Artificial Intelligence},
  volume    = {3},
  number    = {119},
  internal-note = {NOTE(review): same title/journal as entry Adke2021 -- likely duplicate records of one work; confirm and keep one key},
  keywords  = {deep learning, machine learning, mask R-CNN},
  pubstate  = {published},
  tppubtype = {article}
}
Jiang, Y.; Li, C.; Xu, R.; Sun, S.; Robertson, J. S.; Paterson, A. H.
DeepFlower: a deep learning-based approach to characterize flowering patterns of cotton plants in the field Journal Article
In: Plant Methods, vol. 16, no. 156, 2020.
Links | BibTeX | Tags: deep learning, machine learning
@article{jiang2020deepflower,
  title     = {{DeepFlower}: a deep learning-based approach to characterize flowering patterns of cotton plants in the field},
  author    = {Y. Jiang and C. Li and R. Xu and S. Sun and J. S. Robertson and A. H. Paterson},
  url       = {https://plantmethods.biomedcentral.com/articles/10.1186/s13007-020-00698-y},
  doi       = {10.1186/s13007-020-00698-y},
  year      = {2020},
  date      = {2020-12-07},
  urldate   = {2020-12-07},
  journal   = {Plant Methods},
  volume    = {16},
  number    = {156},
  keywords  = {deep learning, machine learning},
  pubstate  = {published},
  tppubtype = {article}
}
Iqbal, Jawad; Xu, Rui; Halloran, Hunter; Li, Changying
Development of a Multi-Purpose Autonomous Differential Drive Mobile Robot for Plant Phenotyping and Soil Sensing Journal Article
In: Electronics, vol. 9, no. 9, pp. 1550, 2020.
Links | BibTeX | Tags: agricultural robot, mobile, phenotyping robot, robotics
@article{iqbal2020maria,
  title     = {Development of a Multi-Purpose Autonomous Differential Drive Mobile Robot for Plant Phenotyping and Soil Sensing},
  author    = {Jawad Iqbal and Rui Xu and Hunter Halloran and Changying Li},
  url       = {https://www.mdpi.com/2079-9292/9/9/1550},
  year      = {2020},
  date      = {2020-09-15},
  urldate   = {2020-09-15},
  journal   = {Electronics},
  volume    = {9},
  number    = {9},
  pages     = {1550},
  keywords  = {agricultural robot, mobile, phenotyping robot, robotics},
  pubstate  = {published},
  tppubtype = {article}
}
Ni, X.; Li, C.; Jiang, H.; Takeda., F.
Deep learning image segmentation and extraction of blueberry fruit traits associated with harvestability and yield Journal Article
In: Horticulture Research, vol. 7, no. 1, pp. 1-14, 2020.
Links | BibTeX | Tags: deep learning, machine learning
@article{Ni2020,
  title     = {Deep learning image segmentation and extraction of blueberry fruit traits associated with harvestability and yield},
  author    = {Ni, X. and Li, C. and Jiang, H. and Takeda, F.},
  url       = {https://www.nature.com/articles/s41438-020-0323-3},
  year      = {2020},
  date      = {2020-07-01},
  urldate   = {2020-07-01},
  journal   = {Horticulture Research},
  volume    = {7},
  number    = {1},
  pages     = {1--14},
  keywords  = {deep learning, machine learning},
  pubstate  = {published},
  tppubtype = {article}
}
Iqbal, Jawad; Xu, Rui; Sun, Shangpeng; Li, Changying
Simulation of an autonomous mobile robot for LiDAR-based in-field phenotyping and navigation Journal Article
In: Robotics, vol. 9, no. 2, pp. 46, 2020.
BibTeX | Tags: LiDAR, robotics, simulation
@article{Iqbal2020,
  title     = {Simulation of an autonomous mobile robot for {LiDAR}-based in-field phenotyping and navigation},
  author    = {Jawad Iqbal and Rui Xu and Shangpeng Sun and Changying Li},
  year      = {2020},
  date      = {2020-06-15},
  journal   = {Robotics},
  volume    = {9},
  number    = {2},
  pages     = {46},
  keywords  = {LiDAR, robotics, simulation},
  pubstate  = {published},
  tppubtype = {article}
}
Jiang, Yu; Li, Changying
Convolutional neural networks for image-based high throughput plant phenotyping: A review Journal Article
In: Plant Phenomics, vol. 2020, no. 4152816, 2020.
Links | BibTeX | Tags: CNN, deep learning, machine learning, review
@article{Yu2020,
  title     = {Convolutional neural networks for image-based high throughput plant phenotyping: A review},
  author    = {Yu Jiang and Changying Li},
  url       = {https://spj.sciencemag.org/journals/plantphenomics/2020/4152816/},
  doi       = {10.34133/2020/4152816},
  year      = {2020},
  date      = {2020-02-20},
  urldate   = {2020-02-20},
  journal   = {Plant Phenomics},
  volume    = {2020},
  number    = {4152816},
  keywords  = {CNN, deep learning, machine learning, review},
  pubstate  = {published},
  tppubtype = {article}
}
Jiang, Y.; Snider, J. L.; Li, C.; Rains, G. C.; Paterson, A. H.
Ground based hyperspectral imaging to characterize canopy-level photosynthetic activities Journal Article
In: Remote Sensing, vol. 12, no. 2, pp. 315, 2020.
BibTeX | Tags: hyperspectral, SIF
@article{Jiang2020,
  title     = {Ground based hyperspectral imaging to characterize canopy-level photosynthetic activities},
  author    = {Jiang, Y. and Snider, J. L. and Li, C. and Rains, G. C. and Paterson, A. H.},
  year      = {2020},
  date      = {2020-02-01},
  journal   = {Remote Sensing},
  volume    = {12},
  number    = {2},
  pages     = {315},
  keywords  = {hyperspectral, SIF},
  pubstate  = {published},
  tppubtype = {article}
}
Zhang, M.; Jiang, Y.; Li, C.; Yang, F.
Fully convolutional networks for blueberry bruising and calyx segmentation using hyperspectral transmittance imaging Journal Article
In: Biosystems Engineering, vol. 192, pp. 159-175, 2020.
Links | BibTeX | Tags: deep learning, hyperspectral, machine learning
@article{Zhang2019b,
  title     = {Fully convolutional networks for blueberry bruising and calyx segmentation using hyperspectral transmittance imaging},
  author    = {M. Zhang and Y. Jiang and C. Li and F. Yang},
  url       = {https://www.sciencedirect.com/science/article/pii/S1537511020300301?dgcid=author},
  doi       = {10.1016/j.biosystemseng.2020.01.018},
  year      = {2020},
  date      = {2020-01-27},
  urldate   = {2020-01-27},
  journal   = {Biosystems Engineering},
  volume    = {192},
  pages     = {159--175},
  keywords  = {deep learning, hyperspectral, machine learning},
  pubstate  = {published},
  tppubtype = {article}
}
Sun, S.; Li, C.; Chee, P.; Paterson, A.; Jiang, Y.; Xu, R.; Robertson, J.; Adhikari, J.; Shehzad, T.
Three-dimensional photogrammetric mapping of cotton bolls in situ based on point cloud segmentation and clustering Journal Article
In: ISPRS Journal of Photogrammetry and Remote Sensing, vol. 160 , pp. 195-207, 2020.
@article{Sun2019,
  title     = {Three-dimensional photogrammetric mapping of cotton bolls in situ based on point cloud segmentation and clustering},
  author    = {S. Sun and C. Li and P. Chee and A. Paterson and Y. Jiang and R. Xu and J. Robertson and J. Adhikari and T. Shehzad},
  url       = {https://www.sciencedirect.com/science/article/abs/pii/S0924271619302990?via%3Dihub},
  year      = {2020},
  date      = {2020-01-01},
  journal   = {ISPRS Journal of Photogrammetry and Remote Sensing},
  volume    = {160},
  pages     = {195--207},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Lu, Renfu; Beers, Robbe Van; Saeys, Wouter; Li, Changying; Cen, Haiyan
Measurement of optical properties of fruits and vegetables: A review Journal Article
In: Postharvest Biology and Technology, vol. 159, pp. 111003, 2020, ISSN: 0925-5214.
@article{RN27,
  title     = {Measurement of optical properties of fruits and vegetables: A review},
  author    = {Renfu Lu and Robbe Van Beers and Wouter Saeys and Changying Li and Haiyan Cen},
  url       = {https://www.sciencedirect.com/science/article/pii/S0925521419300870},
  issn      = {0925-5214},
  year      = {2020},
  date      = {2020-01-01},
  journal   = {Postharvest Biology and Technology},
  volume    = {159},
  pages     = {111003},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
2019
Jiang, Y.; Li, C.; Paterson, A.; Robertson, J.
DeepSeedling: Deep convolutional network and Kalman filter for plant seedling detection and counting in the field Journal Article
In: Plant Methods, vol. 15, no. 1, pp. 141, 2019.
Links | BibTeX | Tags: deep learning, machine learning
@article{Jiang2019,
  title     = {{DeepSeedling}: Deep convolutional network and {Kalman} filter for plant seedling detection and counting in the field},
  author    = {Y. Jiang and C. Li and A. Paterson and J. Robertson},
  url       = {https://plantmethods.biomedcentral.com/articles/10.1186/s13007-019-0528-3#citeas},
  doi       = {10.1186/s13007-019-0528-3},
  year      = {2019},
  date      = {2019-11-23},
  urldate   = {2019-11-23},
  journal   = {Plant Methods},
  volume    = {15},
  number    = {1},
  pages     = {141},
  keywords  = {deep learning, machine learning},
  pubstate  = {published},
  tppubtype = {article}
}
Sun, S.; Li, C.; Paterson, A.; Chee, P.
Image processing algorithms for infield single cotton boll counting and yield prediction Journal Article
In: Computers and Electronics in Agriculture, 2019.
Abstract | Links | BibTeX | Tags:
@article{Sun2019b,
  title     = {Image processing algorithms for infield single cotton boll counting and yield prediction},
  author    = {S. Sun and C. Li and A. Paterson and P. Chee and J. S. Robertson},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Image-processing-algorithms-for-infield-single-cotton-boll-counting-and-yield-prediction-1.pdf},
  year      = {2019},
  date      = {2019-09-12},
  journal   = {Computers and Electronics in Agriculture},
  volume    = {166},
  pages     = {104976},
  internal-note = {NOTE(review): volume, pages, and fifth author recovered from the publisher citation line that had been pasted into the abstract field; verify against the publisher record},
  abstract  = {Cotton boll number is an important component of fiber yield, arguably the most important phenotypic trait to plant breeders and growers alike. In addition, boll number provides a better understanding on the physiological and genetic mechanisms of crop growth and development, facilitating timely decisions on crop management to maximize profit. Traditional in-field cotton boll number counting by visual inspection is time consuming and labor-intensive. In this work, we presented novel image processing algorithms for automatic single cotton boll recognition and counting under natural illumination in the field. A digital camera mounted on a robot platform was used to acquire images with a 45° downward angle on three different days before harvest. A double thresholding with region growth algorithm combining color and spatial features was applied to segment bolls from background, and three geometric-feature-based algorithms were developed to estimate boll number. Line features detected by linear Hough Transform and the minimum boundary distance between two regions were used to merge disjointed regions split by branches and burrs, respectively. The area and the elongation ratio between major and minor axes were used to separate bolls overlapping in clusters. A total of 210 images captured under sunny and cloudy illumination conditions on three days were used to validate the performance of the cotton boll recognition method, with an F1 score of around 0.98; whereas, the best accuracy for boll counting was around 84.6%. At the whole plot level, fifteen plots were used to build a linear regression model between the estimated boll number and the overall fiber yield with a R2 value of 0.53. The performance was evaluated by another ten plots with a mean absolute percentage error of 8.92% and a root mean square error of 99 g. The methodology developed in this study provides a means to estimate cotton boll number from color images under field conditions and would be helpful to predict crop yield and understand genetic mechanisms of crop growth.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Cotton boll number is an important component of fiber yield, arguably the most important phenotypic trait to plant breeders and growers alike. In addition, boll number provides a better understanding on the physiological and genetic mechanisms of crop growth and development, facilitating timely decisions on crop management to maximize profit. Traditional in-field cotton boll number counting by visual inspection is time consuming and labor-intensive. In this work, we presented novel image processing algorithms for automatic single cotton boll recognition and counting under natural illumination in the field. A digital camera mounted on a robot platform was used to acquire images with a 45° downward angle on three different days before harvest. A double thresholding with region growth algorithm combining color and spatial features was applied to segment bolls from background, and three geometric-feature-based algorithms were developed to estimate boll number. Line features detected by linear Hough Transform and the minimum boundary distance between two regions were used to merge disjointed regions split by branches and burrs, respectively. The area and the elongation ratio between major and minor axes were used to separate bolls overlapping in clusters. A total of 210 images captured
under sunny and cloudy illumination conditions on three days were used to validate the performance of
the cotton boll recognition method, with an F1 score of around 0.98; whereas, the best accuracy for boll counting was around 84.6%. At the whole plot level, fifteen plots were used to build a linear regression model between the estimated boll number and the overall fiber yield with a R2 value of 0.53. The performance was evaluated by another ten plots with a mean absolute percentage error of 8.92% and a root mean square error of 99 g. The methodology developed in this study provides a means to estimate cotton boll number from color images under field conditions and would be helpful to predict crop yield and understand genetic mechanisms of crop growth.
Jiang, Y.; Li, C.; Takeda, F.; Kramer, E. A.; Ashrafi, H.; Hunter, J.
3D point cloud data to quantitatively characterize size and shape of shrub crops Journal Article
In: Horticulture Research, vol. 6, no. 43, 2019.
Abstract | Links | BibTeX | Tags:
@article{Jiang2018,
  title     = {{3D} point cloud data to quantitatively characterize size and shape of shrub crops},
  author    = {Y. Jiang and C. Li and F. Takeda and E. A. Kramer and H. Ashrafi and J. Hunter},
  url       = {https://www.nature.com/articles/s41438-019-0123-9},
  year      = {2019},
  date      = {2019-04-06},
  journal   = {Horticulture Research},
  volume    = {6},
  number    = {1},
  pages     = {43},
  internal-note = {NOTE(review): the original abstract field contained the cotton multispectral/UAS abstract belonging to the 2019 PLoS One entry; replaced with the shrub-crop abstract that matches this title. Number/pages split per the pasted citation "Horticulture research, 6(1), 43" -- verify},
  abstract  = {Size and shape are important properties of shrub crops such as blueberries, and they can be particularly useful for evaluating bush architecture suited to mechanical harvesting. The overall goal of this study was to develop a 3D imaging approach to measure size-related traits and bush shape that are relevant to mechanical harvesting. 3D point clouds were acquired for 367 bushes from five genotype groups. Point cloud data were preprocessed to obtain clean bush points for characterizing bush architecture, including bush morphology (height, width, and volume), crown size, and shape descriptors (path curve λ and five shape indices). One-dimensional traits (height, width, and crown size) had high correlations (R2 = 0.88–0.95) between proposed method and manual measurements, whereas bush volume showed relatively lower correlations (R2 = 0.78–0.85). These correlations suggested that the present approach was accurate in measuring one-dimensional size traits and acceptable in estimating three-dimensional bush volume. Statistical results demonstrated that the five genotype groups were statistically different in crown size and bush shape. The differences matched with human evaluation regarding optimal bush architecture for mechanical harvesting. In particular, a visualization tool could be generated using crown size and path curve λ, which showed great potential of determining bush architecture suitable for mechanical harvesting quickly. Therefore, the processing pipeline of 3D point cloud data presented in this study is an effective tool for blueberry breeding programs (in particular for mechanical harvesting) and farm management.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
This paper demonstrates the application of aerial multispectral images in cotton plant phenotyping. Four phenotypic traits (plant height, canopy cover, vegetation index, and flower) were measured from multispectral images captured by a multispectral camera on an unmanned aerial system. Data were collected on eight different days from two fields. Ortho-mosaic and digital elevation models (DEM) were constructed from the raw images using the structure from motion (SfM) algorithm. A data processing pipeline was developed to calculate plant height using the ortho-mosaic and DEM. Six ground calibration targets (GCTs) were used to correct the error of the calculated plant height caused by the georeferencing error of the DEM. Plant heights were measured manually to validate the heights predicted from the imaging method. The error in estimation of the maximum height of each plot ranged from -40.4 to 13.5 cm among six datasets, all of which showed strong linear relationships with the manual measurement (R2 > 0.89). Plot canopy was separated from the soil based on the DEM and normalized differential vegetation index (NDVI). Canopy cover and mean canopy NDVI were calculated to show canopy growth over time and the correlation between the two indices was investigated. The spectral responses of the ground, leaves, cotton flower, and ground shade were analyzed and detection of cotton flowers was satisfactory using a support vector machine (SVM). This study demonstrated the potential of using aerial multispectral images for high throughput phenotyping of important cotton phenotypic traits in the field.
Zhang, M.; Li, C.; Yang, F.
Optical properties of blueberry flesh and skin and Monte Carlo multi-layered simulation of light interaction with fruit tissues Journal Article
In: Postharvest Biology and Technology, vol. 150, pp. 28-41, 2019.
Abstract | Links | BibTeX | Tags:
@article{Zhang201901,
  title     = {Optical properties of blueberry flesh and skin and {Monte Carlo} multi-layered simulation of light interaction with fruit tissues},
  author    = {M. Zhang and C. Li and F. Yang},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Optical-properties-of-blueberry-flesh-and-skin-and-Monte-Carlo-multi-layered-simulation-of-light-interaction-with-fruit-tissues.pdf},
  doi       = {10.1016/j.postharvbio.2018.12.006},
  year      = {2019},
  date      = {2019-03-31},
  journal   = {Postharvest Biology and Technology},
  volume    = {150},
  pages     = {28--41},
  abstract  = {One of the major issues of fresh blueberry production is the excessive bruising caused by mechanical impact during harvesting and packaging, which substantially lowers fruit quality and therefore compromises consumer satisfaction as well as the profitability for growers. Spectroscopy and imaging techniques such as hyperspectral imaging have great potential to detect and quantify internal bruises in fruit. It is important to measure the fundamental optical properties of blueberry tissues to better employ spectroscopy or imaging techniques. The aim of this study was to obtain the absorption coefficient (μa), reduced scattering coefficient (μ′s), and scattering anisotropy (g) of blueberry flesh and skin in the spectral regions of 500–800 nm and 930–1400 nm and investigate the light propagation model of blueberries using Monte Carlo multi-layered (MCML) simulation. The total reflectance, total transmittance, and collimated transmittance of blueberry flesh and skin with three treatments (non-bruised, 30-min bruised, and 24-h bruised) were collected using a single integrating sphere-based spectroscopic system. Using the collected spectra, the inverse adding-doubling (IAD) method was applied to calculate μa, μ′s, and g. Results indicated that the differences between bruised (30 min and 24 h) and non-bruised flesh samples for both μ′s and g were significant from 930 nm to 1400 nm. Microscope images revealed that the differences were caused by the damaged and ruptured cellular structure of bruised flesh. Although μa, μ′s, and g showed significant differences between non-bruised and bruised (30 min and 24 h) flesh in the spectral region of 400–700 nm, the MCML simulation results showed that this spectral region is not effective in detecting bruises due to strong absorption and backward scattering of the blueberry skin. In contrast, the absorption effect of the skin in the near infrared range (930–1400 nm) was small, allowing light to penetrate and interact with the flesh. Therefore, the near infrared spectral region is an effective spectral range for inspecting bruised blueberries using either reflectance or transmittance method. This study reported the optical properties of blueberry skin and flesh with varying degree of bruising for the first time and simulated photon interaction with fruit tissues for bruising detection using MCML. These findings would provide guidance to develop non-destructive sensing methods for blueberry internal bruising detection.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
One of the major issues of fresh blueberry production is the excessive bruising caused by mechanical impact during harvesting and packaging, which substantially lowers fruit quality and therefore compromises consumer satisfaction as well as the profitability for growers. Spectroscopy and imaging techniques such as hyperspectral imaging have great potential to detect and quantify internal bruises in fruit. It is important to measure the fundamental optical properties of blueberry tissues to better employ spectroscopy or imaging techniques. The aim of this study was to obtain the absorption coefficient (μa), reduced scattering coefficient (μ′ s), and scattering anisotropy (g) of blueberry flesh and skin in the spectral regions of 500–800 nm and 930–1400 nm and investigate the light propagation model of blueberries using Monte Carlo multi-layered (MCML) simulation. The total reflectance, total transmittance, and collimated transmittance of blueberry flesh and skin with three treatments (non-bruised, 30-min bruised, and 24-h bruised) were collected using a single integrating spherebased spectroscopic system. Using the collected spectra, the inverse adding-doubling (IAD) method was applied to calculate μa, μ′
s , and g. Results indicated that the differences between bruised (30 min and 24 h) and nonbruised flesh samples for both μ′ s and g were significant from 930 nm to 1400 nm. Microscope images revealed that the differences were caused by the damaged and ruptured cellular structure of bruised flesh. Although μa, μ′ s , and g showed significant differences between non-bruised and bruised (30 min and 24 h) flesh in the spectral region of 400–700 nm, the MCML simulation results showed that this spectral region is not effective in detecting bruises due to strong absorption and backward scattering of the blueberry skin. In contrast, the absorption effect of the skin in the near infrared range (930–1400 nm) was small, allowing light to penetrate and interact with the flesh. Therefore, the near infrared spectral region is an effective spectral range for inspecting bruised blueberries using either reflectance or transmittance method. This study reported the optical properties of blueberry skin and flesh with varying degree of bruising for the first time and simulated photon interaction with fruit tissues for bruising detection using MCML. These findings would provide guidance to develop non-destructive sensing methods for blueberry internal bruising detection.
Xu, R.; Li, C.; Paterson, A. H.
Multispectral imaging and unmanned aerial systems for cotton plant phenotyping Journal Article
In: PLoS One, no. 0205083, 2019.
Abstract | Links | BibTeX | Tags: agricultural robot, robotics
@article{Xu201901,
  title         = {Multispectral imaging and unmanned aerial systems for cotton plant phenotyping},
  author        = {Xu, R. and Li, C. and Paterson, A. H.},
  url           = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Multispectral-imaging-and-unmanned-aerial-systems-for-cotton-plant-phenotyping.pdf},
  doi           = {10.1371/journal.pone.0205083},
  year          = {2019},
  date          = {2019-02-27},
  urldate       = {2019-02-27},
  journal       = {PLoS One},
  volume        = {14},
  number        = {2},
  pages         = {e0205083},
  abstract      = {Size and shape are important properties of shrub crops such as blueberries, and they can be particularly useful for evaluating bush architecture suited to mechanical harvesting. The overall goal of this study was to develop a 3D imaging approach to measure size-related traits and bush shape that are relevant to mechanical harvesting. 3D point clouds were acquired for 367 bushes from five genotype groups. Point cloud data were preprocessed to obtain clean bush points for characterizing bush architecture, including bush morphology (height, width, and volume), crown size, and shape descriptors (path curve λ and five shape indices). One-dimensional traits (height, width, and crown size) had high correlations (R2 = 0.88–0.95) between proposed method and manual measurements, whereas bush volume showed relatively lower correlations (R2 = 0.78–0.85). These correlations suggested that the present approach was accurate in measuring one-dimensional size traits and acceptable in estimating three-dimensional bush volume. Statistical results demonstrated that the five genotype groups were statistically different in crown size and bush shape. The differences matched with human evaluation regarding optimal bush architecture for mechanical harvesting. In particular, a visualization tool could be generated using crown size and path curve λ, which showed great potential of determining bush architecture suitable for mechanical harvesting quickly. Therefore, the processing pipeline of 3D point cloud data presented in this study is an effective tool for blueberry breeding programs (in particular for mechanical harvesting) and farm management.},
  internal-note = {NOTE(review): the abstract text above describes a 3D point-cloud study of blueberry bush architecture, not the cotton multispectral UAS work named in the title -- verify against the published article. Volume/number/pages taken from the entry's own publisher citation (PLoS ONE 14(2): e0205083); the old number field held only the bare article ID.},
  keywords      = {agricultural robot, robotics},
  pubstate      = {published},
  tppubtype     = {article}
}
Size and shape are important properties of shrub crops such as blueberries, and they can be particularly useful for evaluating bush architecture suited to mechanical harvesting. The overall goal of this study was to develop a 3D imaging approach to measure size-related traits and bush shape that are relevant to mechanical harvesting. 3D point clouds were acquired for 367 bushes from five genotype groups. Point cloud data were preprocessed to obtain clean bush points for characterizing bush architecture, including bush morphology (height, width, and volume), crown size, and shape descriptors (path curve λ and five shape indices). One-dimensional traits (height, width, and crown size) had high correlations (R2 = 0.88–0.95) between proposed method and manual measurements, whereas bush volume showed relatively lower correlations (R2 = 0.78–0.85). These correlations suggested that the present approach was accurate in measuring one-dimensional size traits and acceptable in estimating three-dimensional bush volume. Statistical results demonstrated that the five genotype groups were statistically different in crown size and bush shape. The differences matched with human evaluation regarding optimal bush architecture for mechanical harvesting. In particular, a visualization tool could be generated using crown size and path curve λ, which showed great potential of determining bush architecture suitable for mechanical harvesting quickly. Therefore, the processing pipeline of 3D point cloud data presented in this study is an effective tool for blueberry breeding programs (in particular for mechanical harvesting) and farm management.
2018
Fan, S.; Li, C.; Huang, W.; Chen, L.
Data fusion of two hyperspectral imaging systems with complementary spectral sensing ranges for blueberry bruising detection Journal Article
In: Sensors, vol. 18, no. 12, pp. 4463, 2018.
Abstract | Links | BibTeX | Tags: hyperspectral
@article{Fan201801,
  title     = {Data fusion of two hyperspectral imaging systems with complementary spectral sensing ranges for blueberry bruising detection},
  author    = {Fan, S. and Li, C. and Huang, W. and Chen, L.},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Data-fusion-of-two-hyperspectral-imaging-systems-with-complementary-spectral-sensing-ranges-for-blueberry-bruising-detection.pdf},
  doi       = {10.3390/s18124463},
  year      = {2018},
  date      = {2018-12-17},
  urldate   = {2018-12-17},
  journal   = {Sensors},
  volume    = {18},
  number    = {12},
  pages     = {4463},
  abstract  = {Currently, the detection of blueberry internal bruising focuses mostly on single hyperspectral imaging (HSI) systems. Attempts to fuse different HSI systems with complementary spectral ranges are still lacking. A push broom based HSI system and a liquid crystal tunable filter (LCTF) based HSI system with different sensing ranges and detectors were investigated to jointly detect blueberry internal bruising in the lab. The mean reflectance spectrum of each berry sample was extracted from the data obtained by two HSI systems respectively. The spectral data from the two spectroscopic techniques were analyzed separately using feature selection method, partial least squares-discriminant analysis (PLS-DA), and support vector machine (SVM), and then fused with three data fusion strategies at the data level, feature level, and decision level. The three data fusion strategies achieved better classification results than using each HSI system alone. The decision level fusion integrating classification results from the two instruments with selected relevant features achieved more promising results, suggesting that the two HSI systems with complementary spectral ranges, combined with feature selection and data fusion strategies, could be used synergistically to improve blueberry internal bruising detection. This study was the first step in demonstrating the feasibility of the fusion of two HSI systems with complementary spectral ranges for detecting blueberry bruising, which could lead to a multispectral imaging system with a few selected wavelengths and an appropriate detector for bruising detection on the packing line.},
  keywords  = {hyperspectral},
  pubstate  = {published},
  tppubtype = {article}
}
Currently, the detection of blueberry internal bruising focuses mostly on single hyperspectral imaging (HSI) systems. Attempts to fuse different HSI systems with complementary spectral ranges are still lacking. A push broom based HSI system and a liquid crystal tunable filter (LCTF) based HSI system with different sensing ranges and detectors were investigated to jointly detect blueberry internal bruising in the lab. The mean reflectance spectrum of each berry sample was extracted from the data obtained by two HSI systems respectively. The spectral data from the two spectroscopic techniques were analyzed separately using feature selection method, partial least squares-discriminant analysis (PLS-DA), and support vector machine (SVM), and then fused with three data fusion strategies at the data level, feature level, and decision level. The three data fusion strategies achieved better classification results than using each HSI system alone. The decision level fusion integrating classification results from the two instruments with selected relevant features achieved more promising results, suggesting that the two HSI systems with complementary spectral ranges, combined with feature selection and data fusion strategies, could be used synergistically to improve blueberry internal bruising detection. This study was the first step in demonstrating the feasibility of the fusion of two HSI systems with complementary spectral ranges for detecting blueberry bruising, which could lead to a multispectral imaging system with a few selected wavelengths and an appropriate detector for bruising detection on the packing line.
Ozturk, S.; Kong, F.; Singh, R. K.; Kuzy, J. D.; Li, C.; Trabelsi, S.
Dielectric properties, heating rate, and heating uniformity of various seasoning spices and their mixtures with radio frequency heating Journal Article
In: Journal of Food Engineering, vol. 228, pp. 128-141, 2018.
Abstract | Links | BibTeX | Tags:
@article{Ozturk201801,
  title     = {Dielectric properties, heating rate, and heating uniformity of various seasoning spices and their mixtures with radio frequency heating},
  author    = {Ozturk, S. and Kong, F. and Singh, R. K. and Kuzy, J. D. and Li, C. and Trabelsi, S.},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Dielectric-properties-heating-rate-and-heating-uniformity-of-various-seasoning-spices-and-their-mixtures-with-radio-frequency-heating.pdf},
  doi       = {10.1016/j.jfoodeng.2018.02.011},
  year      = {2018},
  date      = {2018-07-01},
  journal   = {Journal of Food Engineering},
  volume    = {228},
  pages     = {128--141},
  abstract  = {Low moisture foods, including seasoning spices, have been associated with a number of multi-state outbreaks of salmonellosis in the past decade. The long-term objective of this study was to develop an effective in-package pasteurization treatment for seasoning mixtures based on radio frequency (RF) heating. Seasoning spices obtained from grocery stores included red, white, and black pepper; cumin; curry powder; and garlic powder with moisture contents ranging from 3.1 to 12.3% (wet basis). The dielectric properties (DP) of the seasoning spices and their mixtures as influenced by frequency, moisture, temperature, mixing fraction and salt content were determined using a precision LCR meter and liquid test fixture at frequency ranging from 1 to 30 MHz. The RF heating rates of each spice and their mixtures were evaluated using a 27.12-MHz, 6-kW pilot scale RF system with 105 mm gap between electrodes. To evaluate the effect of mixing on heating uniformity, a sample (50 g) was placed into a polystyrene plastic cylindrical container and heated to 70 °C, and surface images were taken by an infrared camera. The results showed that the relationship among moisture content, temperature and DP of white pepper can be explained by a second-order model at 13.56 and 27.12 MHz. The DP and heating rates of spice mixtures ranged between the highest and lowest values of their respective individual spices. Increase in salt content resulted in a decrease in heating rate, which resulted in a better heating uniformity with smaller uniformity index (UI). The RF heating rate of samples ranged from 2.97 to 23.61 °C min⁻¹. The highest heating rate in samples corresponded to the worst heating uniformity, and highest average temperature on the sample surface. The most uniform heat distribution on top surface was obtained for garlic powder as 0.012 (UI) at 70 °C.
The information obtained from this study is important to develop an effective RF heating strategy for pathogen control in seasoning mixture.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Low moisture foods, including seasoning spices, have been associated with a number of multi-state outbreaks of salmonellosis in the past decade. The long-term objective of this study was to develop an effective in-package pasteurization treatment for seasoning mixtures based on radio frequency (RF) heating. Seasoning spices obtained from grocery stores included red, white, and black pepper; cumin; curry powder; and garlic powder with moisture contents ranging from 3.1 to 12.3% (wet basis). The dielectric properties (DP) of the seasoning spices and their mixtures as influenced by frequency, moisture, temperature, mixing fraction and salt content were determined using a precision LCR meter and liquid test fixture at frequency ranging from 1 to 30 MHz. The RF heating rates of each spice and their mixtures were evaluated using a 27.12-MHz, 6-kW pilot scale RF system with 105 mm gap between electrodes. To evaluate the effect of mixing on heating uniformity, a sample (50 g) was placed into a polystyrene plastic cylindrical container and heated to 70 °C, and surface images were taken by an infrared camera. The results showed that the relationship among moisture content, temperature and DP of white pepper can be explained by a second-order model at 13.56 and 27.12 MHz. The DP and heating rates of spice mixtures ranged between the highest and lowest values of their respective individual spices. Increase in salt content resulted in a decrease in heating rate, which resulted in a better heating uniformity with smaller uniformity index (UI). The RF heating rate of samples ranged from 2.97 to 23.61 °C min⁻¹. The highest heating rate in samples corresponded to the worst heating uniformity, and highest average temperature on the sample surface. The most uniform heat distribution on top surface was obtained for garlic powder as 0.012 (UI) at 70 °C.
The information obtained from this study is important to develop an effective RF heating strategy for pathogen control in seasoning mixture.
Sun, S.; Li, C.; Paterson, A. H.; Jiang, Y.; Xu, R.; Roberson, J.; Snider, J.; Chee, P.
In-field high throughput phenotyping and cotton plant growth analysis using LiDAR Journal Article
In: Frontiers in Plant Science, vol. 9, pp. 16, 2018.
Abstract | Links | BibTeX | Tags: High-throughput phenotyping
@article{Sun2018,
  title     = {In-field high throughput phenotyping and cotton plant growth analysis using {LiDAR}},
  author    = {Sun, S. and Li, C. and Paterson, A. H. and Jiang, Y. and Xu, R. and Robertson, J. S. and Snider, J. and Chee, P.},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/In-Field-High-Throughput-Phenotyping-of-Cotton-Plant-Height-Using-LiDAR.pdf},
  doi       = {10.3389/fpls.2018.00016},
  year      = {2018},
  date      = {2018-01-30},
  urldate   = {2018-01-30},
  journal   = {Frontiers in Plant Science},
  volume    = {9},
  pages     = {16},
  abstract  = {A LiDAR-based high-throughput phenotyping (HTP) system was developed for cotton plant phenotyping in the field. The HTP system consists of a 2D LiDAR and an RTK-GPS mounted on a high clearance tractor. The LiDAR scanned three rows of cotton plots simultaneously from the top and the RTK-GPS was used to provide the spatial coordinates of the point cloud during data collection. Configuration parameters of the system were optimized to ensure the best data quality. A height profile for each plot was extracted from the dense three dimensional point clouds; then the maximum height and height distribution of each plot were derived. In lab tests, single plants were scanned by LiDAR using 0.5° angular resolution and results showed an R² value of 1.00 (RMSE = 3.46 mm) in comparison to manual measurements. In field tests using the same angular resolution, the LiDAR-based HTP system achieved average R² values of 0.98 (RMSE = 65 mm) for cotton plot height estimation, compared to manual measurements. This HTP system is particularly useful for large field application because it provides highly accurate measurements, and the efficiency is greatly improved compared to similar studies using the side view scan.},
  keywords  = {High-throughput phenotyping},
  pubstate  = {published},
  tppubtype = {article}
}
A LiDAR-based high-throughput phenotyping (HTP) system was developed for cotton plant phenotyping in the field. The HTP system consists of a 2D LiDAR and an RTK-GPS mounted on a high clearance tractor. The LiDAR scanned three rows of cotton plots simultaneously from the top and the RTK-GPS was used to provide the spatial coordinates of the point cloud during data collection. Configuration parameters of the system were optimized to ensure the best data quality. A height profile for each plot was extracted from the dense three dimensional point clouds; then the maximum height and height distribution of each plot were derived. In lab tests, single plants were scanned by LiDAR using 0.5◦ angular resolution and results showed an R 2 value of 1.00 (RMSE = 3.46 mm) in comparison to manual measurements. In field tests using the same angular resolution; the LiDAR-based HTP system achieved average R2 values of 0.98 (RMSE = 65 mm) for cotton plot height estimation; compared to manual measurements. This HTP system is particularly useful for large field application because it provides highly accurate measurements; and the efficiency is greatly improved compared to similar studies using the side view scan.
2017
Xu, R.; Li, C.; Paterson, A. H.; Jiang, Y.; Sun, S.; Roberson, J.
Aerial Images and Convolutional Neural Network for Cotton Bloom Detection Journal Article
In: Frontiers in Plant Science, vol. 8, pp. 2235, 2017.
Abstract | Links | BibTeX | Tags: deep learning, High-throughput phenotyping, machine learning
@article{Xu2018,
  title     = {Aerial Images and Convolutional Neural Network for Cotton Bloom Detection},
  author    = {Xu, R. and Li, C. and Paterson, A. H. and Jiang, Y. and Sun, S. and Robertson, J. S.},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Aerial-Images-and-Convolutional-Neural-Network-for-Cotton-Bloom-Detection.pdf},
  doi       = {10.3389/fpls.2017.02235},
  year      = {2017},
  date      = {2017-12-19},
  urldate   = {2017-12-19},
  journal   = {Frontiers in Plant Science},
  volume    = {8},
  pages     = {2235},
  abstract  = {Monitoring flower development can provide useful information for production management, estimating yield and selecting specific genotypes of crops. The main goal of this study was to develop a methodology to detect and count cotton flowers, or blooms, using color images acquired by an unmanned aerial system. The aerial images were collected from two test fields in 4 days. A convolutional neural network (CNN) was designed and trained to detect cotton blooms in raw images, and their 3D locations were calculated using the dense point cloud constructed from the aerial images with the structure from motion method. The quality of the dense point cloud was analyzed and plots with poor quality were excluded from data analysis. A constrained clustering algorithm was developed to register the same bloom detected from different images based on the 3D location of the bloom. The accuracy and incompleteness of the dense point cloud were analyzed because they affected the accuracy of the 3D location of the blooms and thus the accuracy of the bloom registration result. The constrained clustering algorithm was validated using simulated data, showing good efficiency and accuracy. The bloom count from the proposed method was comparable with the number counted manually with an error of −4 to 3 blooms for the field with a single plant per plot. However, more plots were underestimated in the field with multiple plants per plot due to hidden blooms that were not captured by the aerial images. The proposed methodology provides a high-throughput method to continuously monitor the flowering progress of cotton.},
  keywords  = {deep learning, High-throughput phenotyping, machine learning},
  pubstate  = {published},
  tppubtype = {article}
}
Monitoring flower development can provide useful information for production management, estimating yield and selecting specific genotypes of crops. The main goal of this study was to develop a methodology to detect and count cotton flowers, or blooms, using color images acquired by an unmanned aerial system. The aerial images were collected from two test fields in 4 days. A convolutional neural network (CNN) was designed and trained to detect cotton blooms in raw images, and their 3D locations were calculated using the dense point cloud constructed from the aerial images with the structure from motion method. The quality of the dense point cloud was analyzed and plots with poor quality were excluded from data analysis. A constrained clustering algorithm was developed to register the same bloom detected from different images based on the 3D location of the bloom. The accuracy and incompleteness of the dense point cloud were analyzed because they affected the accuracy of the 3D location of the blooms and thus the accuracy of the bloom registration result. The constrained clustering algorithm was validated using simulated data, showing good efficiency and accuracy. The bloom count from the proposed method was comparable with the number counted manually with an error of −4 to 3 blooms for the field with a single plant per plot. However, more plots were underestimated in the field with multiple plants per plot due to hidden blooms that were not captured by the aerial images. The proposed methodology provides a high-throughput method to continuously monitor the flowering progress of cotton.
Jiang, Y.; Li, C.; Paterson, A. H.; Sun, S.; Xu, R.; Roberson, J.
Quantitative Analysis of Cotton Canopy Size in Field Conditions Using a Consumer-Grade RGB-D Camera Journal Article
In: Frontiers in Plant Science, vol. 8, pp. 2233, 2017.
Abstract | Links | BibTeX | Tags: High-throughput phenotyping
@article{Jiang2018,
  title     = {Quantitative Analysis of Cotton Canopy Size in Field Conditions Using a Consumer-Grade {RGB-D} Camera},
  author    = {Jiang, Y. and Li, C. and Paterson, A. H. and Sun, S. and Xu, R. and Robertson, J.},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Quantitative-Analysis-of-Cotton-Canopy-Size-in-Field-Conditions-Using-a-Consumer-Grade-RGB-D-Camera.pdf},
  doi       = {10.3389/fpls.2017.02233},
  year      = {2017},
  date      = {2017-12-19},
  urldate   = {2017-12-19},
  journal   = {Frontiers in Plant Science},
  volume    = {8},
  pages     = {2233},
  abstract  = {Plant canopy structure can strongly affect crop functions such as yield and stress tolerance, and canopy size is an important aspect of canopy structure. Manual assessment of canopy size is laborious and imprecise, and cannot measure multi-dimensional traits such as projected leaf area and canopy volume. Field-based high throughput phenotyping systems with imaging capabilities can rapidly acquire data about plants in field conditions, making it possible to quantify and monitor plant canopy development. The goal of this study was to develop a 3D imaging approach to quantitatively analyze cotton canopy development in field conditions. A cotton field was planted with 128 plots, including four genotypes of 32 plots each. The field was scanned by GPhenoVision (a customized field-based high throughput phenotyping system) to acquire color and depth images with GPS information in 2016 covering two growth stages: canopy development, and flowering and boll development. A data processing pipeline was developed, consisting of three steps: plot point cloud reconstruction, plant canopy segmentation, and trait extraction. Plot point clouds were reconstructed using color and depth images with GPS information. In colorized point clouds, vegetation was segmented from the background using an excess-green (ExG) color filter, and cotton canopies were further separated from weeds based on height, size, and position information. Static morphological traits were extracted on each day, including univariate traits (maximum and mean canopy height and width, projected canopy area, and concave and convex volumes) and a multivariate trait (cumulative height profile). Growth rates were calculated for univariate static traits, quantifying canopy growth and development. Linear regressions were performed between the traits and fiber yield to identify the best traits and measurement time for yield prediction.
The results showed that fiber yield was correlated with static traits after the canopy development stage (R2 = 0.35–0.71) and growth rates in early canopy development stages (R2 = 0.29–0.52). Multi-dimensional traits (e.g., projected canopy area and volume) outperformed one-dimensional traits, and the multivariate trait (cumulative height profile) outperformed univariate traits. The proposed approach would be useful for identification of quantitative trait loci (QTLs) controlling canopy size in genetics/genomics studies or for fiber yield prediction in breeding programs and production environments.},
  keywords  = {High-throughput phenotyping},
  pubstate  = {published},
  tppubtype = {article}
}
Plant canopy structure can strongly affect crop functions such as yield and stress tolerance, and canopy size is an important aspect of canopy structure. Manual assessment of canopy size is laborious and imprecise, and cannot measure multi-dimensional traits such as projected leaf area and canopy volume. Field-based high throughput phenotyping systems with imaging capabilities can rapidly acquire data about plants in field conditions, making it possible to quantify and monitor plant canopy development. The goal of this study was to develop a 3D imaging approach to quantitatively analyze cotton canopy development in field conditions. A cotton field was planted with 128 plots, including four genotypes of 32 plots each. The field was scanned by GPhenoVision (a customized field-based high throughput phenotyping system) to acquire color and depth images with GPS information in 2016 covering two growth stages: canopy development, and flowering and boll development. A data processing pipeline was developed, consisting of three steps: plot point cloud reconstruction, plant canopy segmentation, and trait extraction. Plot point clouds were reconstructed using color and depth images with GPS information. In colorized point clouds, vegetation was segmented from the background using an excess-green (ExG) color filter, and cotton canopies were further separated from weeds based on height, size, and position information. Static morphological traits were extracted on each day, including univariate traits (maximum and mean canopy height and width, projected canopy area, and concave and convex volumes) and a multivariate trait (cumulative height profile). Growth rates were calculated for univariate static traits, quantifying canopy growth and development. Linear regressions were performed between the traits and fiber yield to identify the best traits and measurement time for yield prediction. 
The results showed that fiber yield was correlated with static traits after the canopy development stage (R2 = 0.35–0.71) and growth rates in early canopy development stages (R2 = 0.29–0.52). Multi-dimensional traits (e.g., projected canopy area and volume) outperformed one-dimensional traits, and the multivariate trait (cumulative height profile) outperformed univariate traits. The proposed approach would be useful for identification of quantitative trait loci (QTLs) controlling canopy size in genetics/genomics studies or for fiber yield prediction in breeding programs and production environments.
Jiang, Y.; Li, C.; Robertson, J. S.; Sun, S.; Xu, R.; Paterson, A. H.
GPhenoVision: A Ground Mobile System with Multi-modal Imaging for Field-Based High Throughput Phenotyping of Cotton Journal Article
In: Scientific Reports, vol. 8, no. 1, pp. 1213, 2017.
Abstract | Links | BibTeX | Tags: High-throughput phenotyping
@article{Jiang2018b,
  title         = {{GPhenoVision}: A Ground Mobile System with Multi-modal Imaging for Field-Based High Throughput Phenotyping of Cotton},
  author        = {Jiang, Y. and Li, C. and Robertson, J. S. and Sun, S. and Xu, R. and Paterson, A. H.},
  url           = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/GPhenoVision-A-Ground-Mobile-System-with-Multi-modal-Imaging-for-Field-Based-High-Throughput-Phenotyping-of-Cotton.pdf},
  doi           = {10.1038/s41598-018-19142-2},
  year          = {2017},
  date          = {2017-11-30},
  urldate       = {2017-11-30},
  journal       = {Scientific Reports},
  volume        = {8},
  number        = {1},
  pages         = {1213},
  abstract      = {Imaging sensors can extend phenotyping capability, but they require a system to handle high-volume data. The overall goal of this study was to develop and evaluate a field-based high throughput phenotyping system accommodating high-resolution imagers. The system consisted of a high-clearance tractor and sensing and electrical systems. The sensing system was based on a distributed structure, integrating environmental sensors, real-time kinematic GPS, and multiple imaging sensors including RGB-D, thermal, and hyperspectral cameras. Custom software was developed with a multilayered architecture for system control and data collection. The system was evaluated by scanning a cotton field with 23 genotypes for quantification of canopy growth and development. A data processing pipeline was developed to extract phenotypes at the canopy level, including height, width, projected leaf area, and volume from RGB-D data and temperature from thermal images. Growth rates of morphological traits were accordingly calculated. The traits had strong correlations (r = 0.54–0.74) with fiber yield and good broad sense heritability (H2 = 0.27–0.72), suggesting the potential for conducting quantitative genetic analysis and contributing to yield prediction models. The developed system is a useful tool for a wide range of breeding/genetic, agronomic/physiological, and economic studies.},
  internal-note = {NOTE(review): the DOI (10.1038/s41598-018-19142-2) and the publisher citation give a 2018 publication date, while this entry lists year 2017 (likely the online-first date) -- verify which the site intends before changing.},
  keywords      = {High-throughput phenotyping},
  pubstate      = {published},
  tppubtype     = {article}
}
Imaging sensors can extend phenotyping capability, but they require a system to handle high-volume data. The overall goal of this study was to develop and evaluate a field-based high throughput phenotyping system accommodating high-resolution imagers. The system consisted of a high-clearance tractor and sensing and electrical systems. The sensing system was based on a distributed structure, integrating environmental sensors, real-time kinematic GPS, and multiple imaging sensors including RGB-D, thermal, and hyperspectral cameras. Custom software was developed with a multilayered architecture for system control and data collection. The system was evaluated by scanning a cotton field with 23 genotypes for quantification of canopy growth and development. A data processing pipeline was developed to extract phenotypes at the canopy level, including height, width, projected leaf area, and volume from RGB-D data and temperature from thermal images. Growth rates of morphological traits were accordingly calculated. The traits had strong correlations (r = 0.54–0.74) with fiber yield and good broad sense heritability (H2 = 0.27–0.72), suggesting the potential for conducting quantitative genetic analysis and contributing to yield prediction models. The developed system is a useful tool for a wide range of breeding/genetic, agronomic/physiological, and economic studies.
Patrick, A.; Li, C.
High Throughput Phenotyping of Blueberry Bush Morphological Traits Using Unmanned Aerial Systems Journal Article
In: Remote Sensing, vol. 9, no. 12, pp. 1250, 2017.
Abstract | Links | BibTeX | Tags: agricultural robot, High-throughput phenotyping, phenotyping robot, robotics
@article{Patrick2017,
  title = {High Throughput Phenotyping of Blueberry Bush Morphological Traits Using Unmanned Aerial Systems},
  author = {Patrick, A. and Li, C.},
  url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/High-Throughput-Phenotyping-of-Blueberry-Bush-Morphological-Traits-Using-Unmanned-Aerial-Systems.pdf},
  doi = {10.3390/rs9121250},
  year = {2017},
  date = {2017-11-30},
  urldate = {2017-11-30},
  journal = {Remote Sensing},
  volume = {9},
  number = {12},
  pages = {1250},
  abstract = {Phenotyping morphological traits of blueberry bushes in the field is important for selecting genotypes that are easily harvested by mechanical harvesters. Morphological data can also be used to assess the effects of crop treatments such as plant growth regulators, fertilizers, and environmental conditions. This paper investigates the feasibility and accuracy of an inexpensive unmanned aerial system in determining the morphological characteristics of blueberry bushes. Color images collected by a quadcopter are processed into three-dimensional point clouds via structure from motion algorithms. Bush height, extents, canopy area, and volume, in addition to crown diameter and width, are derived and referenced to ground truth. In an experimental farm, twenty-five bushes were imaged by a quadcopter. Height and width dimensions achieved a mean absolute error of 9.85 cm before and 5.82 cm after systematic under-estimation correction. Strong correlation was found between manual and image derived bush volumes and their traditional growth indices. Hedgerows of three Southern Highbush varieties were imaged at a commercial farm to extract five morphological features (base angle, blockiness, crown percent height, crown ratio, and vegetation ratio) associated with cultivation and machine harvestability. The bushes were found to be partially separable by multivariate analysis. The methodology developed from this study is not only valuable for plant breeders to screen genotypes with bush morphological traits that are suitable for machine harvest, but can also aid producers in crop management such as pruning and plot layout organization.},
  keywords = {agricultural robot, High-throughput phenotyping, phenotyping robot, robotics},
  pubstate = {published},
  tppubtype = {article}
}
Phenotyping morphological traits of blueberry bushes in the field is important for selecting genotypes that are easily harvested by mechanical harvesters. Morphological data can also be used to assess the effects of crop treatments such as plant growth regulators, fertilizers, and environmental conditions. This paper investigates the feasibility and accuracy of an inexpensive unmanned aerial system in determining the morphological characteristics of blueberry bushes. Color images collected by a quadcopter are processed into three-dimensional point clouds via structure from motion algorithms. Bush height, extents, canopy area, and volume, in addition to crown diameter and width, are derived and referenced to ground truth. In an experimental farm, twenty-five bushes were imaged by a quadcopter. Height and width dimensions achieved a mean absolute error of 9.85 cm before and 5.82 cm after systematic under-estimation correction. Strong correlation was found between manual and image derived bush volumes and their traditional growth indices. Hedgerows of three Southern Highbush varieties were imaged at a commercial farm to extract five morphological features (base angle, blockiness, crown percent height, crown ratio, and vegetation ratio) associated with cultivation and machine harvestability. The bushes were found to be partially separable by multivariate analysis. The methodology developed from this study is not only valuable for plant breeders to screen genotypes with bush morphological traits that are suitable for machine harvest, but can also aid producers in crop management such as pruning and plot layout organization.
Kuzy, J. D.; Jiang, Y.; Li, C.
Blueberry bruise detection by pulsed thermographic imaging Journal Article
In: Postharvest Biology and Technology, 136, 166-177, 2017.
Abstract | Links | BibTeX | Tags: thermography
@article{Kuzy2017,
  title = {Blueberry bruise detection by pulsed thermographic imaging},
  author = {Kuzy, J. D. and Jiang, Y. and Li, C.},
  url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Blueberry-bruise-detection-by-pulsed-thermographic-imaging.pdf},
  doi = {10.1016/j.postharvbio.2017.10.011},
  year = {2017},
  date = {2017-10-24},
  urldate = {2017-10-24},
  journal = {Postharvest Biology and Technology},
  volume = {136},
  pages = {166--177},
  abstract = {Blueberries are prone to internal bruising damage during harvesting and postharvest handling. Accurate assessment of bruising damage improves profitability by allowing allocation of berries to appropriate product streams. The goal of this study was to develop a pulsed thermographic imaging system and explore its feasibility in non-destructively detecting bruised blueberries. In this paper, the design and construction of a pulsed thermographic imaging system was described. A total of 200 blueberry fruit samples from two southern highbush cultivars (Farthing and Meadowlark) were collected and bruising treatments were applied to half of the samples. Relevant features were extracted and were demonstrated to be significantly different between healthy and bruised fruit. Classification was performed using linear discriminant analysis, support vector machine, random forest, k-nearest-neighbors, and logistic regression classifiers. Accuracies of up to 88% and 79% were obtained for Farthing and Meadowlark berries, respectively. These results demonstrate the feasibility of pulsed thermography to discriminate between bruised and healthy blueberries.},
  keywords = {thermography},
  pubstate = {published},
  tppubtype = {article}
}
Blueberries are prone to internal bruising damage during harvesting and postharvest handling. Accurate assessment of bruising damage improves profitability by allowing allocation of berries to appropriate product streams. The goal of this study was to develop a pulsed thermographic imaging system and explore its feasibility in non-destructively detecting bruised blueberries. In this paper, the design and construction of a pulsed thermographic imaging system was described. A total of 200 blueberry fruit samples from two southern highbush cultivars (Farthing and Meadowlark) were collected and bruising treatments were applied to half of the samples. Relevant features were extracted and were demonstrated to be significantly different between healthy and bruised fruit. Classification was performed using linear discriminant analysis, support vector machine, random forest, k-nearest-neighbors, and logistic regression classifiers. Accuracies of up to 88% and 79% were obtained for Farthing and Meadowlark berries, respectively. These results demonstrate the feasibility of pulsed thermography to discriminate between bruised and healthy blueberries.
Gallardo, R. K.; Stafne, E. T.; DeVetter, L. W.; Zhang, Q.; Li, C.; Takeda, F.; Williamson, J.; Yan, W.; Beaudry, R.; Cline, W.; Allen, R.
Blueberry producers’ attitudes toward harvest mechanization for fresh market Journal Article
In: Hort Technology, 28(1), 10-16, 2017.
Abstract | Links | BibTeX | Tags:
@article{Gallardo2017,
  title = {Blueberry producers’ attitudes toward harvest mechanization for fresh market},
  author = {Gallardo, R. K. and Stafne, E. T. and DeVetter, L. W. and Zhang, Q. and Li, C. and Takeda, F. and Williamson, J. and Yan, W. and Beaudry, R. and Cline, W. and Allen, R.},
  url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Blueberry-producers’-attitudes-toward-harvest-mechanization-for-fresh-market.pdf},
  doi = {10.21273/HORTTECH03872-17},
  year = {2017},
  date = {2017-09-14},
  journal = {HortTechnology},
  volume = {28},
  number = {1},
  pages = {10--16},
  abstract = {The availability and cost of agricultural labor is constraining the specialty crop industry throughout the United States. Most soft fruits destined for the fresh market are fragile and are usually hand harvested to maintain optimal quality and postharvest longevity. However, because of labor shortages, machine harvest options are being explored out of necessity. A survey on machine harvest of blueberries (Vaccinium sp.) for fresh market was conducted in 2015 and 2016 in seven U.S. states and one Canadian province. Survey respondents totaled 223 blueberry producers of various production sizes and scope. A majority (61%) indicated that their berries were destined for fresh markets with 33% machine harvested for this purpose. Eighty percent said that they thought fruit quality was the limiting factor for machine-harvested blueberries destined for fresh markets. Many producers had used mechanized harvesters, but their experience varied greatly. Just less than half (47%) used mechanical harvesters for fewer than 5 years. Most respondents indicated that labor was a primary concern, as well as competing markets and weather. New technologies that reduce harvesting constraints, such as improvements to harvest machinery and packing lines, were of interest to most respondents. Forty-five percent stated they would be interested in using a modified harvest-aid platform with handheld shaking devices if it is viable (i.e., fruit quality and picking efficiency is maintained and the practice is cost effective). Overall, the survey showed that blueberry producers have great concerns with labor costs and availability and are open to exploring mechanization as a way to mitigate the need for hand-harvest labor.},
  keywords = {},
  pubstate = {published},
  tppubtype = {article}
}
The availability and cost of agricultural labor is constraining the specialty crop industry throughout the United States. Most soft fruits destined for the fresh market are fragile and are usually hand harvested to maintain optimal quality and postharvest longevity. However, because of labor shortages, machine harvest options are being explored out of necessity. A survey on machine harvest of blueberries (Vaccinium sp.) for fresh market was conducted in 2015 and 2016 in seven U.S. states and one Canadian province. Survey respondents totaled 223 blueberry producers of various production sizes and scope. A majority (61%) indicated that their berries were destined for fresh markets with 33% machine harvested for this purpose. Eighty percent said that they thought fruit quality was the limiting factor for machine-harvested blueberries destined for fresh markets. Many producers had used mechanized harvesters, but their experience varied greatly. Just less than half (47%) used mechanical harvesters for fewer than 5 years. Most respondents indicated that labor was a primary concern, as well as competing markets and weather. New technologies that reduce harvesting constraints, such as improvements to harvest machinery and packing lines, were of interest to most respondents. Forty-five percent stated they would be interested in using a modified harvest-aid platform with handheld shaking devices if it is viable (i.e., fruit quality and picking efficiency is maintained and the practice is cost effective). Overall, the survey showed that blueberry producers have great concerns with labor costs and availability and are open to exploring mechanization as a way to mitigate the need for hand-harvest labor.
Zhang, M.; Li, C.; Takeda, F.; Yang, F.
Detection of internally bruised blueberries using hyperspectral transmittance imaging Journal Article
In: Transactions of ASABE, 60(5), 1489-1502, 2017.
Abstract | Links | BibTeX | Tags: hyperspectral
@article{Zhang2017,
  title = {Detection of internally bruised blueberries using hyperspectral transmittance imaging},
  author = {Zhang, M. and Li, C. and Takeda, F. and Yang, F.},
  url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Detection-of-internally-bruised-blueberries-using-hyperspectral-transmittance-imaging.pdf},
  doi = {10.13031/trans.12197},
  year = {2017},
  date = {2017-08-17},
  urldate = {2017-08-17},
  journal = {Transactions of the ASABE},
  volume = {60},
  number = {5},
  pages = {1489--1502},
  abstract = {Internal bruise damage that occurs in blueberry fruit during harvest operations and postharvest handling lowers the overall quality and causes significant economic losses. The main goal of this study was to nondestructively detect internal bruises in blueberries after mechanical damage using hyperspectral transmittance imaging. A total of 600 hand-harvested blueberries were divided into 20 groups of four storage times (30 min, 3 h, 12 h, and 24 h), two storage temperatures (22°C and 4°C), and three treatments (stem bruise, equator bruise, and control). A near-infrared hyperspectral imaging system was used to acquire transmittance images from 970 to 1400 nm with 5 nm bandwidth. Images were acquired from three orientations (calyx-up, stem-up, and equator-up) for fruit in the control and stem bruise groups and from four orientations (calyx-up, stem-up, equator-up, and equator-down) in the equator bruise groups. Immediately after imaging, the fruit samples were sliced, and the sliced surfaces were photographed. The color images of sliced fruit were used as references. By comparing with the reference color images, the profiles of spatial and spectral intensities were evaluated to observe the effect of orientation and help extract regions of interest (ROIs) of bruised and healthy tissues. A support vector machine (SVM) classifier was trained and tested to classify pixels of bruised and healthy tissues. Classification maps were produced, and the bruise ratio was calculated to identify bruised blueberries (bruise ratio >25%). The average accuracy of blueberry identification was 94.5% with the stem-up orientation. The results indicate that detecting bruised blueberries as soon as 30 min after mechanical damage is feasible using hyperspectral transmittance imaging.},
  keywords = {hyperspectral},
  pubstate = {published},
  tppubtype = {article}
}
Internal bruise damage that occurs in blueberry fruit during harvest operations and postharvest handling lowers the overall quality and causes significant economic losses. The main goal of this study was to nondestructively detect internal bruises in blueberries after mechanical damage using hyperspectral transmittance imaging. A total of 600 hand-harvested blueberries were divided into 20 groups of four storage times (30 min, 3 h, 12 h, and 24 h), two storage temperatures (22°C and 4°C), and three treatments (stem bruise, equator bruise, and control). A near-infrared hyperspectral imaging system was used to acquire transmittance images from 970 to 1400 nm with 5 nm bandwidth. Images were acquired from three orientations (calyx-up, stem-up, and equator-up) for fruit in the control and stem bruise groups and from four orientations (calyx-up, stem-up, equator-up, and equator-down) in the equator bruise groups. Immediately after imaging, the fruit samples were sliced, and the sliced surfaces were photographed. The color images of sliced fruit were used as references. By comparing with the reference color images, the profiles of spatial and spectral intensities were evaluated to observe the effect of orientation and help extract regions of interest (ROIs) of bruised and healthy tissues. A support vector machine (SVM) classifier was trained and tested to classify pixels of bruised and healthy tissues. Classification maps were produced, and the bruise ratio was calculated to identify bruised blueberries (bruise ratio >25%). The average accuracy of blueberry identification was 94.5% with the stem-up orientation. The results indicate that detecting bruised blueberries as soon as 30 min after mechanical damage is feasible using hyperspectral transmittance imaging.
Fan, S.; Li, C.; Huang, W.; Chen, L.
Detection of blueberry internal bruising over time using NIR hyperspectral reflectance imaging with optimum wavelengths Journal Article
In: Postharvest Biology and Technology, 134, 55-66, 2017.
Abstract | Links | BibTeX | Tags: hyperspectral
@article{Fan2017,
  title = {Detection of blueberry internal bruising over time using {NIR} hyperspectral reflectance imaging with optimum wavelengths},
  author = {Fan, S. and Li, C. and Huang, W. and Chen, L.},
  url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Detection-of-blueberry-internal-bruising-over-time-using-NIR-hyperspectral-reflectance-imaging-with-optimum-wavelengths.pdf},
  doi = {10.1016/j.postharvbio.2017.08.012},
  year = {2017},
  date = {2017-08-13},
  urldate = {2017-08-13},
  journal = {Postharvest Biology and Technology},
  volume = {134},
  pages = {55--66},
  abstract = {Early detection of internal bruising is one of the major challenges in blueberry postharvest quality sorting processes. The potential of using near infrared (NIR) hyperspectral reflectance imaging (950–1650 nm) with reduced spectral features was investigated for blueberry internal bruising detection 30 min to 12 h after mechanical impact. A least squares support vector machine (LS-SVM) was used to develop classification models to compute the spatial distribution of bruising based on the spectra extracted from regions of interest (ROIs) at four measurement times (30 min, 2 h, 6 h, and 12 h after mechanical impact). Three feature selection methods were used to select optimum wavelengths or band ratio images for bruising detection. The classification model, developed using optimum wavelengths selected by competitive adaptive reweighted sampling (CARS) (CARS-LS-SVM model) and full spectra (full spectra-LS-SVM), had similar performance in the identification of bruised blueberries. Band ratio images (1235 nm/1035 nm) achieved a comparable accuracy with the CARS-LS-SVM model at 6 h, and higher accuracy than CARS-LS-SVM and full spectra-LS-SVM models at 12 h. The overall classification accuracies of 77.5%, 83.8%, 92.5%, and 95.0% were obtained by band ratio images for blueberries 30 min, 2 h, 6 h, and 12 h after impact, respectively. In order to evaluate the performance of the proposed methods, additional validation samples were processed by the detection algorithm. The overall discrimination accuracies for healthy and bruised blueberries in the validation set were 93.3% and 98.0%, respectively, for CARS-LS-SVM model, and 93.3% and 95.9%, respectively, for band ratio images. The overall results indicated that NIR reflectance imaging can detect blueberry internal bruising as early as 30 min after mechanical impact, and band ratio images computed from two wavelengths showed great potential to detect blueberry internal bruising on the packing line.},
  keywords = {hyperspectral},
  pubstate = {published},
  tppubtype = {article}
}
Early detection of internal bruising is one of the major challenges in blueberry postharvest quality sorting processes. The potential of using near infrared (NIR) hyperspectral reflectance imaging (950–1650 nm) with reduced spectral features was investigated for blueberry internal bruising detection 30 min to 12 h after mechanical impact. A least squares support vector machine (LS-SVM) was used to develop classification models to compute the spatial distribution of bruising based on the spectra extracted from regions of interest (ROIs) at four measurement times (30 min, 2 h, 6 h, and 12 h after mechanical impact). Three feature selection methods were used to select optimum wavelengths or band ratio images for bruising detection. The classification model, developed using optimum wavelengths selected by competitive adaptive reweighted sampling (CARS) (CARS-LS-SVM model) and full spectra (full spectra-LS-SVM), had similar performance in the identification of bruised blueberries. Band ratio images (1235 nm/1035 nm) achieved a comparable accuracy with the CARS-LS-SVM model at 6 h, and higher accuracy than CARS-LS-SVM and full spectra-LS-SVM models at 12 h. The overall classification accuracies of 77.5%, 83.8%, 92.5%, and 95.0% were obtained by band ratio images for blueberries 30 min, 2 h, 6 h, and 12 h after impact, respectively. In order to evaluate the performance of the proposed methods, additional validation samples were processed by the detection algorithm. The overall discrimination accuracies for healthy and bruised blueberries in the validation set were 93.3% and 98.0%, respectively, for CARS-LS-SVM model, and 93.3% and 95.9%, respectively, for band ratio images. The overall results indicated that NIR reflectance imaging can detect blueberry internal bruising as early as 30 min after mechanical impact, and band ratio images computed from two wavelengths showed great potential to detect blueberry internal bruising on the packing line.
Zhang, M.; Li, C.; Yang, F.
Classification of Foreign Matter Embedded inside Cotton Lint using Short Wave Infrared (SWIR) Hyperspectral Transmittance Imaging Journal Article
In: Computers and Electronics in Agriculture, 139, 75-90, 2017.
Abstract | Links | BibTeX | Tags: hyperspectral
@article{Zhang2017b,
  title = {Classification of Foreign Matter Embedded inside Cotton Lint using Short Wave Infrared ({SWIR}) Hyperspectral Transmittance Imaging},
  author = {Zhang, M. and Li, C. and Yang, F.},
  url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Classification-of-Foreign-Matter-Embedded-inside-Cotton-Lint-using-Short-Wave-Infrared-SWIR-Hyperspectral-Transmittance-Imaging.pdf},
  doi = {10.1016/j.compag.2017.05.005},
  year = {2017},
  date = {2017-06-18},
  urldate = {2017-06-18},
  journal = {Computers and Electronics in Agriculture},
  volume = {139},
  pages = {75--90},
  abstract = {Cotton is an important source of natural fiber around the world. Cotton lint, however, could be contaminated by various types of foreign matter (FM) during harvesting and processing, leading to reduced quality and potentially even defective textile products. Current sensing methods can detect the presence of foreign matter on the surface of cotton lint, but they are not able to efficiently detect and classify foreign matter that is mixed with or embedded inside cotton lint. This study focused on the detection and classification of common types of foreign matter hidden within the cotton lint by a short wave near infrared hyperspectral imaging (HSI) system using the transmittance mode. Fourteen common categories of foreign matter and cotton lint were collected from the field and the foreign matter particles were sandwiched between two thin cotton lint webs. Operation parameters were optimized through a series of experiments for the best performance of the transmittance mode. After acquiring transmittance images of the cotton lint and foreign matter mixture, minimum noise fraction (MNF) rotation was utilized to obtain component images to assist visual detection and mean spectra extraction from a total of 141 wavelength bands. The optimal spectral bands were identified by using the minimal-redundancy-maximal-relevance (mRMR)-based feature selection method. Linear discriminant analysis (LDA) and a support vector machine (SVM) were employed to classify foreign matter at the spectral and pixel level, respectively. Over 95% classification accuracies for the spectra and the images were achieved using the selected optimal wavelengths. This study indicated that it was feasible to detect botanical (e.g. seed coat, seed meat, stem, and leaf) and non-botanical (e.g. paper, and plastic package) types of foreign matter that were embedded inside cotton lint using short wave infrared hyperspectral transmittance imaging.},
  keywords = {hyperspectral},
  pubstate = {published},
  tppubtype = {article}
}
Cotton is an important source of natural fiber around the world. Cotton lint, however, could be contaminated by various types of foreign matter (FM) during harvesting and processing, leading to reduced quality and potentially even defective textile products. Current sensing methods can detect the presence of foreign matter on the surface of cotton lint, but they are not able to efficiently detect and classify foreign matter that is mixed with or embedded inside cotton lint. This study focused on the detection and classification of common types of foreign matter hidden within the cotton lint by a short wave near infrared hyperspectral imaging (HSI) system using the transmittance mode. Fourteen common categories of foreign matter and cotton lint were collected from the field and the foreign matter particles were sandwiched between two thin cotton lint webs. Operation parameters were optimized through a series of experiments for the best performance of the transmittance mode. After acquiring transmittance images of the cotton lint and foreign matter mixture, minimum noise fraction (MNF) rotation was utilized to obtain component images to assist visual detection and mean spectra extraction from a total of 141 wavelength bands. The optimal spectral bands were identified by using the minimal-redundancy-maximal-relevance (mRMR)-based feature selection method. Linear discriminant analysis (LDA) and a support vector machine (SVM) were employed to classify foreign matter at the spectral and pixel level, respectively. Over 95% classification accuracies for the spectra and the images were achieved using the selected optimal wavelengths. This study indicated that it was feasible to detect botanical (e.g. seed coat, seed meat, stem, and leaf) and non-botanical (e.g. paper, and plastic package) types of foreign matter that were embedded inside cotton lint using short wave infrared hyperspectral transmittance imaging.
Ozturk, S.; Kong, F.; Singh, R. K.; Kuzy, J. D.; Li, C.
Radio frequency heating of corn flour: Heating rate and uniformity Journal Article
In: Innovative Food Science & Emerging Technologies, 44, 191-201, 2017.
Abstract | Links | BibTeX | Tags:
@article{Ozturk2017,
  title = {Radio frequency heating of corn flour: Heating rate and uniformity},
  author = {Ozturk, S. and Kong, F. and Singh, R. K. and Kuzy, J. D. and Li, C.},
  url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Radio-frequency-heating-of-corn-flour-Heating-rate-and-uniformity.pdf},
  doi = {10.1016/j.ifset.2017.05.001},
  year = {2017},
  date = {2017-05-02},
  journal = {Innovative Food Science \& Emerging Technologies},
  volume = {44},
  pages = {191--201},
  abstract = {Non-uniform heating is a major challenge for using radio frequency (RF) heat treatment in pasteurization of low moisture food products. The objective of this study was to evaluate the effect of different electrode gaps, moisture content (MC), bulk density and surrounding materials on RF heating uniformity and rate in corn flour. Additionally, the dielectric and thermal properties of corn flour were determined as affected by MC, temperature (°C), and frequency (MHz). Changes in MC, water activity (aw) and color in the sample after RF heating were measured to evaluate treatment effect on food quality. A precision LCR meter and a liquid test fixture were used to study DP of the sample at RF frequency ranging from 1 to 30 MHz. The RF heating uniformity and temperature profiles of corn flour as exposed to RF heating were obtained with an infrared camera and a data logger connected to a fiber optic sensor. The DP values increased with increasing MC and temperature, but decreased with frequency. The heating rate increased from 3.5 to 6.8 °C min− 1 with increasing MC (from 10.4 to 16.7%), but decreased from 12.7 to 5.2 °C min− 1 with increasing electron gap (from 11 to 15 cm). The corner and edge heating were observed at all layers of the samples for all the distances, and the hottest and the most uniform layer were determined as the middle layer at an electrode gap of 15 cm. Glass petri dish provided better uniformity than those of polyester plastic petri dish. Covering by foam led to more uniform RF heating uniformity in corn flour, and better moisture and aw distribution. This study provided useful information to develop an effective RF process as an alternative of conventional thermal treatments for pasteurization of low-moisture products.},
  keywords = {},
  pubstate = {published},
  tppubtype = {article}
}
Non-uniform heating is a major challenge for using radio frequency (RF) heat treatment in pasteurization of low moisture food products. The objective of this study was to evaluate the effect of different electrode gaps, moisture content (MC), bulk density and surrounding materials on RF heating uniformity and rate in corn flour. Additionally, the dielectric and thermal properties of corn flour were determined as affected by MC, temperature (°C), and frequency (MHz). Changes in MC, water activity (aw) and color in the sample after RF heating were measured to evaluate treatment effect on food quality. A precision LCR meter and a liquid test fixture were used to study DP of the sample at RF frequency ranging from 1 to 30 MHz. The RF heating uniformity and temperature profiles of corn flour as exposed to RF heating were obtained with an infrared camera and a data logger connected to a fiber optic sensor. The DP values increased with increasing MC and temperature, but decreased with frequency. The heating rate increased from 3.5 to 6.8 °C min− 1 with increasing MC (from 10.4 to 16.7%), but decreased from 12.7 to 5.2 °C min− 1 with increasing electron gap (from 11 to 15 cm). The corner and edge heating were observed at all layers of the samples for all the distances, and the hottest and the most uniform layer were determined as the middle layer at an electrode gap of 15 cm. Glass petri dish provided better uniformity than those of polyester plastic petri dish. Covering by foam led to more uniform RF heating uniformity in corn flour, and better moisture and aw distribution. This study provided useful information to develop an effective RF process as an alternative of conventional thermal treatments for pasteurization of low-moisture products.
Takeda, F.; Yang, W.; Li, C.; Freivalds, A.; Sung, K.; Xu, R.; Hu, B.; Williamson, J.; Sargent, S.
Applying New Technologies to Transform Blueberry Harvesting Journal Article
In: Agronomy, 7(2), 33, 2017.
Abstract | Links | BibTeX | Tags:
@article{Takeda2017,
  title = {Applying New Technologies to Transform Blueberry Harvesting},
  author = {Takeda, F. and Yang, W. and Li, C. and Freivalds, A. and Sung, K. and Xu, R. and Hu, B. and Williamson, J. and Sargent, S.},
  url = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/Applying-New-Technologies-to-Transform-Blueberry-Harvesting.pdf},
  doi = {10.3390/agronomy7020033},
  year = {2017},
  date = {2017-04-27},
  journal = {Agronomy},
  volume = {7},
  number = {2},
  pages = {33},
  abstract = {The growth of the blueberry industry in the past three decades has been remarkably robust. However, a labor shortage for hand harvesting, increasingly higher labor costs, and low harvest efficiencies are becoming bottlenecks for sustainable development of the fresh market blueberry production. In this study, we evaluated semi-mechanical harvesting systems consisting of a harvest-aid platform with soft fruit catching surfaces that collected the fruit detached by portable, hand-held, pneumatic shakers. The softer fruit catching surfaces were not glued to the hard sub-surfaces of the harvest-aid platform, but suspended over them. Also, the ergonomic aspect of operating powered harvesting equipment was determined. The pneumatic shakers removed 3.5 to 15 times more fruit (g/min) than by hand. Soft fruit catching surfaces reduced impact force and bruise damage. Fruit firmness was higher in fruit harvested by hand compared to that by pneumatic shakers in some cultivars. The bruise area was less than 8% in fruit harvested by hand and with semi-mechanical harvesting system. The percentage of blue, packable fruit harvested by pneumatic shakers comprised as much as 90% of the total, but less than that of hand-harvested fruit. The ergonomic analysis by electromyography showed that muscle strain in the back, shoulders, and forearms was low in workers operating the light-weight, pneumatic shakers that were tethered to the platform with a tool balancer. The new harvesting method can reduce the labor requirement to about 100 hour/hectare/year and help to mitigate the rising labor cost and shortage of workers for harvesting fresh-market quality blueberries.},
  keywords = {},
  pubstate = {published},
  tppubtype = {article}
}
The growth of the blueberry industry in the past three decades has been remarkably robust. However, a labor shortage for hand harvesting, increasingly higher labor costs, and low harvest efficiencies are becoming bottlenecks for sustainable development of the fresh market blueberry production. In this study, we evaluated semi-mechanical harvesting systems consisting of a harvest-aid platform with soft fruit catching surfaces that collected the fruit detached by portable, hand-held, pneumatic shakers. The softer fruit catching surfaces were not glued to the hard sub-surfaces of the harvest-aid platform, but suspended over them. Also, the ergonomic aspect of operating powered harvesting equipment was determined. The pneumatic shakers removed 3.5 to 15 times more fruit (g/min) than by hand. Soft fruit catching surfaces reduced impact force and bruise damage. Fruit firmness was higher in fruit harvested by hand compared to that by pneumatic shakers in some cultivars. The bruise area was less than 8% in fruit harvested by hand and with semi-mechanical harvesting system. The percentage of blue, packable fruit harvested by pneumatic shakers comprised as much as 90% of the total, but less than that of hand-harvested fruit. The ergonomic analysis by electromyography showed that muscle strain in the back, shoulders, and forearms was low in workers operating the light-weight, pneumatic shakers that were tethered to the platform with a tool balancer. The new harvesting method can reduce the labor requirement to about 100 hour/hectare/year and help to mitigate the rising labor cost and shortage of workers for harvesting fresh-market quality blueberries.
Sun, S.; Li, C.; Paterson, A. H.
In-Field High-Throughput Phenotyping of Cotton Plant Height Using LiDAR Journal Article
In: Remote Sensing, 9(4), 377, 2017.
Abstract | Links | BibTeX | Tags: High-throughput phenotyping
@article{Sun2017,
  title     = {In-Field High-Throughput Phenotyping of Cotton Plant Height Using {LiDAR}},
  author    = {Sun, S. and Li, C. and Paterson, A. H.},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/In-Field-High-Throughput-Phenotyping-of-Cotton-Plant-Height-Using-LiDAR-1.pdf},
  doi       = {10.3390/rs9040377},
  year      = {2017},
  date      = {2017-04-13},
  urldate   = {2017-04-13},
  journal   = {Remote Sensing},
  volume    = {9},
  number    = {4},
  pages     = {377},
  abstract  = {A LiDAR-based high-throughput phenotyping (HTP) system was developed for cotton plant phenotyping in the field. The HTP system consists of a 2D LiDAR and an RTK-GPS mounted on a high clearance tractor. The LiDAR scanned three rows of cotton plots simultaneously from the top and the RTK-GPS was used to provide the spatial coordinates of the point cloud during data collection. Configuration parameters of the system were optimized to ensure the best data quality. A height profile for each plot was extracted from the dense three dimensional point clouds; then the maximum height and height distribution of each plot were derived. In lab tests, single plants were scanned by LiDAR using 0.5° angular resolution and results showed an R2 value of 1.00 (RMSE = 3.46 mm) in comparison to manual measurements. In field tests using the same angular resolution, the LiDAR-based HTP system achieved average R2 values of 0.98 (RMSE = 65 mm) for cotton plot height estimation, compared to manual measurements. This HTP system is particularly useful for large field application because it provides highly accurate measurements, and the efficiency is greatly improved compared to similar studies using the side view scan.},
  keywords  = {High-throughput phenotyping},
  pubstate  = {published},
  tppubtype = {article}
}
A LiDAR-based high-throughput phenotyping (HTP) system was developed for cotton plant phenotyping in the field. The HTP system consists of a 2D LiDAR and an RTK-GPS mounted on a high clearance tractor. The LiDAR scanned three rows of cotton plots simultaneously from the top and the RTK-GPS was used to provide the spatial coordinates of the point cloud during data collection. Configuration parameters of the system were optimized to ensure the best data quality. A height profile for each plot was extracted from the dense three dimensional point clouds; then the maximum height and height distribution of each plot were derived. In lab tests, single plants were scanned by LiDAR using 0.5° angular resolution and results showed an R2 value of 1.00 (RMSE = 3.46 mm) in comparison to manual measurements. In field tests using the same angular resolution; the LiDAR-based HTP system achieved average R2 values of 0.98 (RMSE = 65 mm) for cotton plot height estimation; compared to manual measurements. This HTP system is particularly useful for large field application because it provides highly accurate measurements; and the efficiency is greatly improved compared to similar studies using the side view scan.
Kuzy, J. D.; Li, C.
A Pulsed Thermographic Imaging System for Detection and Identification of Cotton Foreign Matter Journal Article
In: Sensors, 17(3), 518, 2017.
Abstract | Links | BibTeX | Tags: thermography
@article{Kuzy2017b,
  title     = {A Pulsed Thermographic Imaging System for Detection and Identification of Cotton Foreign Matter},
  author    = {Kuzy, J. D. and Li, C.},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/A-Pulsed-Thermographic-Imaging-System-for-Detection-and-Identification-of-Cotton-Foreign-Matter.pdf},
  doi       = {10.3390/s17030518},
  year      = {2017},
  date      = {2017-03-02},
  urldate   = {2017-03-02},
  journal   = {Sensors},
  volume    = {17},
  number    = {3},
  pages     = {518},
  abstract  = {Detection of foreign matter in cleaned cotton is instrumental to accurately grading cotton quality, which in turn impacts the marketability of the cotton. Current grading systems return estimates of the amount of foreign matter present, but provide no information about the identity of the contaminants. This paper explores the use of pulsed thermographic analysis to detect and identify cotton foreign matter. The design and implementation of a pulsed thermographic analysis system is described. A sample set of 240 foreign matter and cotton lint samples were collected. Hand-crafted waveform features and frequency-domain features were extracted and analyzed for statistical significance. Classification was performed on these features using linear discriminant analysis and support vector machines. Using waveform features and support vector machine classifiers, detection of cotton foreign matter was performed with 99.17% accuracy. Using frequency-domain features and linear discriminant analysis, identification was performed with 90.00% accuracy. These results demonstrate that pulsed thermographic imaging analysis produces data which is of significant utility for the detection and identification of cotton foreign matter.},
  keywords  = {thermography},
  pubstate  = {published},
  tppubtype = {article}
}
Detection of foreign matter in cleaned cotton is instrumental to accurately grading cotton quality, which in turn impacts the marketability of the cotton. Current grading systems return estimates of the amount of foreign matter present, but provide no information about the identity of the contaminants. This paper explores the use of pulsed thermographic analysis to detect and identify cotton foreign matter. The design and implementation of a pulsed thermographic analysis system is described. A sample set of 240 foreign matter and cotton lint samples were collected. Hand-crafted waveform features and frequency-domain features were extracted and analyzed for statistical significance. Classification was performed on these features using linear discriminant analysis and support vector machines. Using waveform features and support vector machine classifiers, detection of cotton foreign matter was performed with 99.17% accuracy. Using frequency-domain features and linear discriminant analysis, identification was performed with 90.00% accuracy. These results demonstrate that pulsed thermographic imaging analysis produces data which is of significant utility for the detection and identification of cotton foreign matter.
Patrick, A.; Pelham, S.; Culbreath, A.; Holbrook, C.; Godoy, I. J. d.; Li, C.
High Throughput Phenotyping of Tomato Spot Wilt Disease in Peanuts Using Unmanned Aerial Systems and Multispectral Imaging Journal Article
In: IEEE Instrumentation & Measurement Magazine, 20(3), 4-12, 2017.
Abstract | Links | BibTeX | Tags: agricultural robot, High-throughput phenotyping, phenotyping robot, robotics
@article{Patrick2017b,
  title     = {High Throughput Phenotyping of Tomato Spot Wilt Disease in Peanuts Using Unmanned Aerial Systems and Multispectral Imaging},
  author    = {Patrick, A. and Pelham, S. and Culbreath, A. and Holbrook, C. and de Godoy, I. J. and Li, C.},
  url       = {http://sensinglab.engr.uga.edu//srv/htdocs/wp-content/uploads/2019/11/High-Throughput-Phenotyping-of-Tomato-Spot-Wilt-Disease-in-Peanuts-Using-Unmanned-Aerial-Systems-and-Multispectral-Imaging.pdf},
  doi       = {10.1109/MIM.2017.7951684},
  year      = {2017},
  date      = {2017-02-08},
  urldate   = {2017-02-08},
  journal   = {IEEE Instrumentation \& Measurement Magazine},
  volume    = {20},
  number    = {3},
  pages     = {4--12},
  abstract  = {The amount of visible and near infrared light reflected by plants varies depending on their health. In this study, multispectral images were acquired by a quadcopter for high throughput phenotyping of tomato spot wilt disease resistance among twenty genotypes of peanuts. The plants were visually assessed to acquire ground truth ratings of disease incidence. Multispectral images were processed into several vegetation indices. The vegetation index image of each plot has a unique distribution of pixel intensities. The percentage and number of pixels above and below varying thresholds were extracted. These features were correlated with manually acquired data to develop a model for assessing the percentage of each plot diseased. Ultimately, the best vegetation indices and pixel distribution feature for disease detection were determined and correlated with manual ratings and yield. The relative resistance of each genotype was then compared. Image-based disease ratings effectively ranked genotype resistance as early as 93 days from seeding.},
  keywords  = {agricultural robot, High-throughput phenotyping, phenotyping robot, robotics},
  pubstate  = {published},
  tppubtype = {article}
}
The amount of visible and near infrared light reflected by plants varies depending on their health. In this study, multispectral images were acquired by a quadcopter for high throughput phenotyping of tomato spot wilt disease resistance among twenty genotypes of peanuts. The plants were visually assessed to acquire ground truth ratings of disease incidence. Multispectral images were processed into several vegetation indices. The vegetation index image of each plot has a unique distribution of pixel intensities. The percentage and number of pixels above and below varying thresholds were extracted. These features were correlated with manually acquired data to develop a model for assessing the percentage of each plot diseased. Ultimately, the best vegetation indices and pixel distribution feature for disease detection were determined and correlated with manual ratings and yield. The relative resistance of each genotype was then compared. Image-based disease ratings effectively ranked genotype resistance as early as 93 days from seeding.