<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article article-type="research-article" dtd-version="2.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Environ. Sci.</journal-id>
<journal-title>Frontiers in Environmental Science</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Environ. Sci.</abbrev-journal-title>
<issn pub-type="epub">2296-665X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1083328</article-id>
<article-id pub-id-type="doi">10.3389/fenvs.2023.1083328</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Environmental Science</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Spatial distribution and temporal variation of tropical mountaintop vegetation through images obtained by drones</article-title>
<alt-title alt-title-type="left-running-head">Medeiros et al.</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fenvs.2023.1083328">10.3389/fenvs.2023.1083328</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Medeiros</surname>
<given-names>Tha&#xed;s Pereira de</given-names>
</name>
<xref ref-type="aff" rid="aff1">
<sup>1</sup>
</xref>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1841446/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Morellato</surname>
<given-names>Leonor Patr&#xed;cia Cerdeira</given-names>
</name>
<xref ref-type="aff" rid="aff2">
<sup>2</sup>
</xref>
<uri xlink:href="https://loop.frontiersin.org/people/889177/overview"/>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Silva</surname>
<given-names>Thiago Sanna Freire</given-names>
</name>
<xref ref-type="aff" rid="aff3">
<sup>3</sup>
</xref>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
</contrib>
</contrib-group>
<aff id="aff1">
<sup>1</sup>
<institution>Graduate Program of Remote Sensing</institution>, <institution>National Institute for Space Research (INPE)</institution>, <institution>Earth Observation and Geoinformatics Division (DIOTG)</institution>, <addr-line>S&#xe3;o Jos&#xe9; dos Campos</addr-line>, <country>Brazil</country>
</aff>
<aff id="aff2">
<sup>2</sup>
<institution>Phenology Lab</institution>, <institution>Institute of Biosciences</institution>, <institution>Department of Biodiversity</institution>, <institution>S&#xe3;o Paulo State University (UNESP)</institution>, <addr-line>Rio Claro</addr-line>, <country>Brazil</country>
</aff>
<aff id="aff3">
<sup>3</sup>
<institution>Ecosystem Dynamics Observatory (EcoDyn)</institution>, <institution>Biological and Environmental Sciences</institution>, <institution>Faculty of Natural Sciences</institution>, <institution>University of Stirling</institution>, <addr-line>Stirling</addr-line>, <addr-line>Scotland</addr-line>, <country>United Kingdom</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>
<bold>Edited by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1497097/overview">Russell Doughty</ext-link>, University of Oklahoma, United States</p>
</fn>
<fn fn-type="edited-by">
<p>
<bold>Reviewed by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1372438/overview">Nathan Moore</ext-link>, Michigan State University, United States</p>
<p>
<ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2108243/overview">Weiwei Zhang</ext-link>, Suzhou University of Science and Technology, China</p>
</fn>
<corresp id="c001">&#x2a;Correspondence: Tha&#xed;s Pereira de Medeiros, <email>thais.pereira@inpe.br</email>; Thiago Sanna Freire Silva, <email>thiago.sf.silva@stir.ac.uk</email>
</corresp>
<fn fn-type="other">
<p>This article was submitted to Environmental Informatics and Remote Sensing, a section of the journal Frontiers in Environmental Science</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>10</day>
<month>02</month>
<year>2023</year>
</pub-date>
<pub-date pub-type="collection">
<year>2023</year>
</pub-date>
<volume>11</volume>
<elocation-id>1083328</elocation-id>
<history>
<date date-type="received">
<day>28</day>
<month>10</month>
<year>2022</year>
</date>
<date date-type="accepted">
<day>26</day>
<month>01</month>
<year>2023</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2023 Medeiros, Morellato and Silva.</copyright-statement>
<copyright-year>2023</copyright-year>
<copyright-holder>Medeiros, Morellato and Silva</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>Modern UAS (Unmanned Aerial Vehicles) or just drones have emerged with the primary goal of producing maps and imagery with extremely high spatial resolution. The refined information provides a good opportunity to quantify the distribution of vegetation across heterogeneous landscapes, revealing an important strategy for biodiversity conservation. We investigate whether computer vision and machine learning techniques (Object-Based Image Analysis&#x2014;OBIA method, associated with Random Forest classifier) are effective to classify heterogeneous vegetation arising from ultrahigh-resolution data generated by UAS images. We focus our fieldwork in a highly diverse, seasonally dry, complex mountaintop vegetation system, the <italic>campo rupestre</italic> or rupestrian grassland, located at Serra do Cip&#x00F3;, Espinha&#x00E7;o Range, Southeastern Brazil. According to our results, all classifications received general accuracy above 0.95, indicating that the methodological approach enabled the identification of subtle variations in species composition, the capture of detailed vegetation and landscape features, and the recognition of vegetation types&#x2019; phenophases. Therefore, our study demonstrated that the machine learning approach and combination between OBIA method and Random Forest classifier, generated extremely high accuracy classification, reducing the misclassified pixels, and providing valuable data for the classification of complex vegetation systems such as the <italic>campo rupestre</italic> mountaintop grassland.</p>
</abstract>
<kwd-group>
<kwd>UAS</kwd>
<kwd>unmanned aerial system</kwd>
<kwd>machine learning</kwd>
<kwd>random forest</kwd>
<kwd>heterogeneous vegetation</kwd>
<kwd>rupestrian grassland</kwd>
<kwd>phenology</kwd>
</kwd-group>
<contract-num rid="cn001">&#x23;2010/521113-5 &#x23;2009/54208-6 &#x23;2019/03269-7</contract-num>
<contract-sponsor id="cn001">Funda&#xe7;&#xe3;o de Amparo &#xe0; Pesquisa do Estado de S&#xe3;o Paulo<named-content content-type="fundref-id">10.13039/501100001807</named-content>
</contract-sponsor>
<contract-sponsor id="cn002">Conselho Nacional de Desenvolvimento Cient&#xed;fico e Tecnol&#xf3;gico<named-content content-type="fundref-id">10.13039/501100003593</named-content>
</contract-sponsor>
</article-meta>
</front>
<body>
<sec id="s1">
<title>1 Introduction</title>
<p>Monitoring of grassland resources is important to guarantee the regional grassland conservation, management, and sustainable development (<xref ref-type="bibr" rid="B24">Fernandes et al., 2020</xref>). Through the development of science and new technologies the grassland ecosystems monitoring has been increasingly enriched (<xref ref-type="bibr" rid="B41">Lyu et al., 2020</xref>; <xref ref-type="bibr" rid="B40">2022</xref>) and can make a unique contribution to the ecology and restoration of these forgotten ecosystems. Traditional satellite remote sensing technology is a relevant means of monitoring regional grassland ecosystems, but the images obtained by satellite remote sensing tend to have low spatial resolution and the revisit cycle is too long (<xref ref-type="bibr" rid="B56">Peci&#xf1;a et al., 2019</xref>; <xref ref-type="bibr" rid="B8">Balasubramanian et al., 2020</xref>).</p>
<p>Recent developments for mapping and assessing ecosystem functions and services require information at a very detailed spatial resolution (<xref ref-type="bibr" rid="B76">Villoslada et al., 2020</xref>). In this sense, Unmanned Aerial Systems (UAS), also known as Unmanned Aerial Vehicles (UAV), Remotely Piloted Aerial Systems (RPAS), or simply &#x201c;drones&#x201d;, have advanced rapidly to satisfy these needs, and have been increasingly used for ecology, environmental and grassland studies (<xref ref-type="bibr" rid="B6">Anderson and Gaston, 2013</xref>; <xref ref-type="bibr" rid="B17">Colomina and Molina, 2014</xref>; <xref ref-type="bibr" rid="B76">Villoslada et al., 2020</xref>).</p>
<p>Modern UAS enables the production of maps and imagery with extremely high spatial resolution and can be a low-cost tool for monitoring vegetation at the landscape scale (<xref ref-type="bibr" rid="B82">Whitehead and Hugenholtz, 2014</xref>; <xref ref-type="bibr" rid="B83">Xie, Zhang, and Welsh, 2015</xref>; <xref ref-type="bibr" rid="B78">Wang et al., 2019a</xref>; <xref ref-type="bibr" rid="B71">Sun et al., 2021</xref>). With pixel-size in the order of centimeters (or even millimeters), UAS imagery has been used to extract structural and functional properties of environments, ranging from individuals to ecosystems (<xref ref-type="bibr" rid="B6">Anderson and Gaston, 2013</xref>; <xref ref-type="bibr" rid="B68">Singh and Frazier, 2018</xref>; <xref ref-type="bibr" rid="B74">Valbuena et al., 2020</xref>; <xref ref-type="bibr" rid="B71">Sun et al., 2021</xref>). The refined information provides a good opportunity to obtain and quantify the distribution of vegetation across heterogeneous landscapes, capturing variations in plant community cover and revealing an important strategy for biodiversity conservation (<xref ref-type="bibr" rid="B7">Baena, Boyd, and Moat, 2018</xref>; <xref ref-type="bibr" rid="B78">Wang et al., 2019a</xref>; <xref ref-type="bibr" rid="B21">D&#xed;az-Delgado, Cazacu, and Adamescu, 2019</xref>; <xref ref-type="bibr" rid="B59">Prentice et al., 2021</xref>).</p>
<p>UAS can also offer very good temporal resolution (<xref ref-type="bibr" rid="B51">Nex and Remondino, 2013</xref>; <xref ref-type="bibr" rid="B32">Kampen et al., 2019</xref>) and have become more affordable and capable of offering autonomous and flexible functionalities, such as minimization of user intervention, including the ability to plan and conduct surveys to collect aerial photography (<xref ref-type="bibr" rid="B30">Hassanalian and Abdelkefi, 2017</xref>; <xref ref-type="bibr" rid="B29">Hamylton et al., 2020</xref>). Also, UAS are easy to operate and remotely controlled, allowing the users to go into areas that are inaccessible to humans (<xref ref-type="bibr" rid="B23">Feng et al., 2021</xref>). Moreover, UAS remote sensing systems are composed of at least five components: [1] Platform system; [2] Sensor system; [3] Ground control and data transmission system; [4] Data processing system, and [5] Operators (<xref ref-type="bibr" rid="B70">Sun et al., 2017</xref>).</p>
<p>Due to advantages in flexibility and high temporal and spatial resolution, UAS remote sensing technology has been applied in many fields, e.g., geography, ecology, and environmental science (<xref ref-type="bibr" rid="B1">Adao et al., 2017</xref>; <xref ref-type="bibr" rid="B61">Reis et al., 2019</xref>). In the case of grassland studies with UAS imagery it is possible to capture spectral differences between different vegetation species and effectively improve the accuracy of species identification on smaller vegetation units, providing a technical support for the fine monitoring of grassland vegetation species composition (<xref ref-type="bibr" rid="B66">Schmidt et al., 2017</xref>). Another important application of UAS imagery is in grassland degradation monitoring, including studies concerning degradation of total vegetation (biomass and productivity), degradation of vegetation structure (reduction of plant height and increase of weeds), processes that pose several challenges to grassland management and sustainable development (<xref ref-type="bibr" rid="B41">Lyu et al., 2020</xref>). The hyperspectral and hyperspatial remote sensing technology can further obtain more-in-depth data information and help in monitoring of grassland vegetation structure (<xref ref-type="bibr" rid="B40">Lyu et al., 2022</xref>).</p>
<p>In addition, besides the affordable RGB cameras, a wide variety of vegetation mapping sensors can be operated from a UAS platform, including multispectral sensors focused on red and near-infrared wavebands, hyperspectral sensors that detect reflectance at many wavelengths, and, the range of LiDAR sensors (light detection and ranging), indicated to study the degree of vegetative ground cover and characterize canopy structure (<xref ref-type="bibr" rid="B17">Colomina and Molina, 2014</xref>; <xref ref-type="bibr" rid="B29">Hamylton et al., 2020</xref>). RGB cameras are most used due, mainly, to their low cost and ease of use (<xref ref-type="bibr" rid="B58">Pichon et al., 2019</xref>), while multispectral sensors can better evaluate plant health and stress status, due to their highest spectral resolution (<xref ref-type="bibr" rid="B79">Wang et al., 2019b</xref>). Finally, as another advantage of UAS, ultra-low altitude flying can reduce the effect of clouds on imagery and thereby improve the data quality (<xref ref-type="bibr" rid="B80">Watts, Ambrosia, and Hinkley, 2012</xref>; <xref ref-type="bibr" rid="B71">Sun et al., 2021</xref>).</p>
<p>One of the applications of Remote Sensing is the use of imagery to classify and delineate different objects and land cover types on the Earth&#x2019;s surface, a process that involves collecting field data from a series of samples as an input for training a classification model (<xref ref-type="bibr" rid="B86">Zou and Greenberg, 2019</xref>; <xref ref-type="bibr" rid="B59">Prentice et al., 2021</xref>). Classification in Remote Sensing involves the categorization of response functions recorded in imagery as representations of real-world objects, according to their spectral similarity to the initial values overlapping the samples, which can provide detailed information about land-cover, specifically in a mixed forest-grassland (<xref ref-type="bibr" rid="B19">Corbane et al., 2015</xref>; <xref ref-type="bibr" rid="B20">Cullum et al., 2016</xref>; <xref ref-type="bibr" rid="B29">Hamylton et al., 2020</xref>; <xref ref-type="bibr" rid="B85">Zhang et al., 2021</xref>).</p>
<p>In the context of climate change and rapid transformation of grassland environment due to anthropogenic activities, mainly during the last four decades (<xref ref-type="bibr" rid="B24">Fernandes et al., 2020</xref>; <xref ref-type="bibr" rid="B95">Buisson et al., 2022</xref>), the grassland classification is crucial for its management. The identification of grassland classes provides the basis for the protection of grassland resources and for the reconstruction and restoration of a grassland ecological environment (<xref ref-type="bibr" rid="B47">Meng et al., 2022</xref>).</p>
<p>Conventional platforms are characterized by lack of spatial detail to solve and classify fine landscape features (individual trees and shrubs), requiring large amounts of cost and resources (<xref ref-type="bibr" rid="B78">Wang et al., 2019a</xref>; <xref ref-type="bibr" rid="B47">Meng et al., 2022</xref>). In this regard, hyperspatial tools, such as UAS imagery, have been successfully applied in many fine-scale classification studies, allowing the identification of subtle variations in species composition, and capturing detailed vegetation and landscape features, for instance, shadows, stems, and canopy gaps (<xref ref-type="bibr" rid="B35">Laliberte and Rango, 2013</xref>). According to a study performed by <xref ref-type="bibr" rid="B15">Christian and Christiane (2014)</xref>, data collected from UAS can capture more information about environment composition and structure. Thus, using UAS high-resolution cameras is one of the most preferred methods to classify land cover in a mixed savannas-grassland ecosystem (<xref ref-type="bibr" rid="B85">Zhang et al., 2021</xref>).</p>
<p>However, there are still several challenges in using this strategy, including the development of appropriate procedures to manage and extract information from high-volume and hyperspatial resolution data. One of the challenges is how to deal with shadows cast by trees, high intra-class spectral variation, and high inter-class spectral heterogeneity (<xref ref-type="bibr" rid="B39">Lu and Weng, 2007</xref>; <xref ref-type="bibr" rid="B38">Lu and He, 2017</xref>; <xref ref-type="bibr" rid="B9">Berra, Gaulton, and Barr, 2019</xref>).</p>
<p>The classification process can be divided into unsupervised and supervised classification algorithms, revealing it as a crucial tool to achieve interpretable results. Unsupervised classification techniques group pixels or segments according to their similarity using a variety of different algorithms. On the other hand, supervised classification requires the use of training sample areas, in other words, spectral signatures of the objects under study (<xref ref-type="bibr" rid="B65">Schafer et al., 2016</xref>; <xref ref-type="bibr" rid="B53">Nogueira et al., 2019</xref>).</p>
<p>The application of conventional classification techniques on hyperspatial imagery can result in the misclassification of pixels with identical spectral responses to different classes, causing an effect known as &#x201c;salt and pepper&#x201d;. Furthermore, traditional supervised classifiers assume the normal distribution of remote sensing datasets (<xref ref-type="bibr" rid="B31">Jensen, 2015</xref>), but these datasets can deal with complex and non-linear relations, thus conventional classification algorithms do not fit to classify the ultrahigh remote sensing data (<xref ref-type="bibr" rid="B72">Thessen, 2016</xref>; <xref ref-type="bibr" rid="B54">Oddi et al., 2019</xref>).</p>
<p>Some strategies have been developed to solve these problems: A. Inclusion of texture features in a classifier; B. Object-Based Image Analysis (OBIA), in which an image is pre-segmented into polygons; C. Using computer vision and machine learning techniques, such as random forest, support vector machine and convolutional neural networks (<xref ref-type="bibr" rid="B52">Nguyen et al., 2019</xref>; <xref ref-type="bibr" rid="B86">Zou and Greenberg, 2019</xref>). In addition, the accuracy of vegetation identification will depend on four factors: spatial resolution, spectral resolution, habitat complexity, and classification algorithms.</p>
<p>Object-Based Image Analysis (OBIA), more specifically Geographic Object-Based Image Analysis (GEOBIA), has provided important methodological refinements for high-resolution image classification, having advantages over traditional pixel-based methods, and is considered a superior classification technique (<xref ref-type="bibr" rid="B37">Liu and Abd-Elrahman, 2018</xref>; <xref ref-type="bibr" rid="B22">Dujon and Schofield, 2019</xref>). GEOBIA methods are based on pre-clustering the image pixel into homogeneous objects (regions, clusters), according to specific spectral characteristics (features, color, texture), and shape characteristics (<xref ref-type="bibr" rid="B10">Blaschke, 2010</xref>).</p>
<p>Machine Learning (ML) approach, one of the most popular latest technologies in the fourth industrial revolution (<xref ref-type="bibr" rid="B64">Sarker, 2021</xref>), has been developed as a response to the rigidity of many other computer programs in comparison with the world&#x2019;s infinite versatility. Emerged within the field of artificial intelligence, ML aims to deal with complex data and to learn without being explicitly programmed (<xref ref-type="bibr" rid="B43">Mahesh, 2020</xref>; <xref ref-type="bibr" rid="B59">Prentice et al., 2021</xref>). The data and desired result are provided to a learning algorithm (a &#x201c;learner&#x201d;), which then generates the algorithm that turns one into the other (<xref ref-type="bibr" rid="B22">Dujon and Schofield, 2019</xref>; <xref ref-type="bibr" rid="B29">Hamylton et al., 2020</xref>). ML algorithms are divided into four categories: Supervised learning, Unsupervised learning, Semi-supervised learning, and Reinforcement learning. In the case of Supervised learning (the category focused on the research), the most common supervised tasks are the &#x201c;classification algorithms&#x201d;, which separate the data, and the &#x201c;regression models&#x201d;, which fit the data, but do not offer predictive capability (<xref ref-type="bibr" rid="B64">Sarker, 2021</xref>). The supervised learning technique, specifically the classification process, has been successfully applied to sequential RGB images from ground digital cameras focused on species recognition, and in studies that involve ecology and Earth sciences applications (<xref ref-type="bibr" rid="B5">Almeida et al., 2014</xref>; <xref ref-type="bibr" rid="B4">2016</xref>; <xref ref-type="bibr" rid="B72">Thessen, 2016</xref>; <xref ref-type="bibr" rid="B45">Maxwell, Warner, and Fang, 2018</xref>).</p>
<p>In ecology, the Random Forests algorithm is commonly used to perform the supervised classification process, followed by maximum likelihood classification, support vector machine, K-means clustering, convolution neural networks, and thresholding (<xref ref-type="bibr" rid="B22">Dujon and Schofield, 2019</xref>). Random Forest (RF) is an ensemble learning technique based on decision trees, which has higher accuracy when compared to traditional methods, enabling the simultaneous classification of features based on a set of training samples, making possible the determination of the best performing explanatory variables (<xref ref-type="bibr" rid="B11">Breiman, 2001</xref>).</p>
<p>Considering the advantages and challenges explained, the present work aimed to answer the following questions: Are the machine learning techniques effective to process the ultrahigh-resolution data generated by UAS images? Is the OBIA an effective method for classifying heterogeneous vegetation? Using a Random Forest classifier, what is the better combination of parameters for proposing a high quality of ultrahigh spatial resolution classification? We focus our fieldwork on a highly diverse, seasonally dry, complex mountaintop vegetation system, the <italic>campo rupestre</italic> or rupestrian grassland, considered a vegetation mosaic or a continental archipelago (<xref ref-type="bibr" rid="B48">Morellato and Silveira, 2018</xref>; <xref ref-type="bibr" rid="B44">Mattos et al., 2019</xref>; <xref ref-type="bibr" rid="B75">Vasconcelos et al., 2020</xref>). The mosaic formed by the vegetation types, from grasslands, and rocky outcrops to cerrado and forest patches, poses a challenge to accurately classify <italic>campo rupestre</italic> vegetation diversity at a landscape scale, a prime measure for the conservation of this threatened ecosystem (<xref ref-type="bibr" rid="B25">Fernandes et al., 2018</xref>; <xref ref-type="bibr" rid="B24">2020</xref>). This high diversity of vegetation types cannot be distinguished in conventional imagery, like Sentinel-2 or even Planetscope imagery, but in UAV imagery this is possible.</p>
</sec>
<sec sec-type="materials|methods" id="s2">
<title>2 Materials and methods</title>
<sec id="s2-1">
<title>2.1 Study area</title>
<p>The Espinha&#xe7;o Mountain Range, in central Brazil, is a known center of plant diversity with more than 5,000 recognized plant species (<xref ref-type="bibr" rid="B60">Rapini et al., 2008</xref>; <xref ref-type="bibr" rid="B67">Silveira et al., 2016</xref>). The Southern Espinha&#xe7;o Range is recognized as a phytogeographic province, having its type locality at the Serra do Cip&#xf3; National Park (<xref ref-type="bibr" rid="B28">Giulietti, Pirani, and Harley, 1997</xref>; <xref ref-type="bibr" rid="B16">Colli-Silva, Vasconcelos, and Pirani, 2019</xref>), while Serra do Cip&#xf3; comprises only c.a. 200&#xa0;km<sup>2</sup> (less than 5% of the entire range), it still hosts more than one-third of Espinha&#xe7;o&#x2019;s total biodiversity. Among the represented vegetation types, the region is known for its highly heterogeneous <italic>campo rupestre</italic> (rupestrian grasslands) vegetation (<xref ref-type="bibr" rid="B44">Mattos et al., 2019</xref>). <italic>Campo rupestre</italic> is a component of the <italic>Cerrado</italic> floristic domain (Brazilian savanna), dominated by grasslands and restricted to areas of shallow soil and rugged topography above 900&#xa0;m (<xref ref-type="bibr" rid="B67">Silveira et al., 2016</xref>; <xref ref-type="bibr" rid="B48">Morellato and Silveira, 2018</xref>).</p>
<p>
<italic>Campo rupestre</italic> is characterized by extremely high plant species richness and endemism (<xref ref-type="bibr" rid="B26">Fernandes, 2016</xref>), which has been mostly explained by long-term climatic stability, the so-called Old Climatic Buffered Infertile Landscapes (OCBILS) (<xref ref-type="bibr" rid="B67">Silveira et al., 2016</xref>; <xref ref-type="bibr" rid="B48">Morellato and Silveira, 2018</xref>). During the 18th and 19th centuries, the Espinha&#xe7;o Range and particularly Serra do Cip&#xf3; were impacted by human activities linked to gold and diamond exploration, and with the decline of mineral deposits by the end of the 19th century the main economic activity of the region has switched towards tourism (<xref ref-type="bibr" rid="B60">Rapini et al., 2008</xref>; <xref ref-type="bibr" rid="B25">Fernandes et al., 2018</xref>; <xref ref-type="bibr" rid="B24">2020</xref>).</p>
<p>The climate in Serra do Cip&#xf3; region is strongly seasonal, with a dry and cold season from April to September, and a wet and warm season from October to March. The mean total annual precipitation is approximately 1,600&#xa0;mm, and mean annual temperatures are around 21&#xb0;C (<xref ref-type="bibr" rid="B36">Le Stradic et al., 2018</xref>). According to the Brazilian National Water Agency (Ag&#xea;ncia Nacional de &#xc1;guas, in Portuguese), the dry season has a monthly rainfall of 10&#xa0;mm or less, while in the wet season monthly rainfall is around 230&#xa0;mm.</p>
</sec>
<sec id="s2-2">
<title>2.2 Drone imagery and field data collection</title>
<p>Drone image acquisition was done within the private conservation area belonging to CEDRO T&#xea;xtil S.A. (located between latitudes of 19&#xb0;12&#x2032;S and 19&#xb0;34&#x2032;S and, longitudes of 43&#xb0;27&#x2032;W and 43&#xb0;38&#x2032;W), contained within the Morro da Pedreira Environmental Protection Area (<xref ref-type="fig" rid="F1">Figure 1</xref>), locally named as Cedro (<xref ref-type="bibr" rid="B44">Mattos et al., 2019</xref>).</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption>
<p>The location of the Cedro study site, a private conservation area belonging to CEDRO T&#xea;xtil S.A., in the Serra do Cip&#xf3; and within the Morro da Pedreira Environmental Protection Area, where the imagery was performed.</p>
</caption>
<graphic xlink:href="fenvs-11-1083328-g001.tif"/>
</fig>
<p>Aerial photos were acquired using a fixed-wing UAS, whose characteristics can be viewed in <xref ref-type="table" rid="T1">Table 1</xref>. Flight lines, number of photos, and spatial resolution were automatically calculated based on informed sensor size, focal length, the flying height of 120&#xa0;m above ground, and 80% overlap between consecutive flight lines. The aerial imaging mission covered a core 800 &#xd7; 800&#xa0;m square area (64&#xa0;ha), yielding final orthomosaics with a nominal spatial resolution of 5&#xa0;cm/pixel. The area was imaged at monthly intervals from February 2016 to February 2017, totaling 12 flights.</p>
<table-wrap id="T1" position="float">
<label>TABLE 1</label>
<caption>
<p>Description and characterization of all equipment (Camera, Firmware, and Flight) installed on the fixed-wing UAS.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center"/>
<th align="center">Drone</th>
<th align="center">Camera</th>
<th align="center">Firmware</th>
<th align="center">Flight</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<italic>Description</italic>
</td>
<td align="center">Fixed-wing UAS, built by the Brazilian company G-Drones</td>
<td align="center">CANON SX260 RGB camera</td>
<td align="center">CHDK custom firmware was installed on the camera</td>
<td align="center">Pixhawk V1 autopilot board running the ArduPlane 3.4 open source flight controller software</td>
</tr>
<tr>
<td align="center">
<italic>Characteristics</italic>
</td>
<td align="center">190&#xa0;cm wingspan, 2.5&#xa0;kg, based on the widely available Skywalker model plane frame</td>
<td align="center">12 megapixels (6.2&#xa0;mm &#xd7; 4.6&#xa0;mm sensor, 4,000 &#xd7; 3,000 pixels) of resolution and a focal length of 4.5&#xa0;mm</td>
<td align="center">The automated KAP - UAS script was run within CHDK custom firmware, for automatic interval triggering during flight</td>
<td align="center">The flight can be planned and executed using the Mission Planner Software</td>
</tr>
<tr>
<td align="center">
<italic>Reference link</italic>
</td>
<td align="center">
<ext-link ext-link-type="uri" xlink:href="http://www.g-drones.com.br/drones/">http://www.g-drones.com.br/drones/</ext-link>
</td>
<td align="center">
<ext-link ext-link-type="uri" xlink:href="https://www.loja.canon.com.br/pt/canonbr/">https://www.loja.canon.com.br/pt/canonbr/</ext-link>
</td>
<td align="center">
<ext-link ext-link-type="uri" xlink:href="https://chdk.fandom.com/">https://chdk.fandom.com</ext-link>
</td>
<td align="center">
<ext-link ext-link-type="uri" xlink:href="http://ardupilot.org/plane/">http://ardupilot.org/plane/</ext-link>
</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The resulting aerial photos were mosaicked and orthorectified using the Pix4D Mapper 3.1 Educational software (<ext-link ext-link-type="uri" xlink:href="https://www.pix4d.com/">https://www.pix4d.com/</ext-link>), using a proprietary implementation of the Structure from Motion (SfM) algorithm. SfM is a computer vision technique capable of extracting an individual&#x2019;s reference points for automatic alignment and positioning of aerial photos, then generating a tridimensional point cloud, which can then be further transformed into a Digital Surface Model (DSM), which is in turn used to produce the resulting orthomosaic (<xref ref-type="bibr" rid="B81">Westoby et al., 2012</xref>).</p>
<p>In July 2018, we performed in-person fieldwork in Serra do Cip&#xf3; to collect georeferenced ground points, according to the predefined mapping classes (see Classification section). At each sampling point, the main vegetation/cover class was identified, and the location was logged using a Garmin GPSMAP 64&#xa0;s GPS receiver. These ground points were used to support the sample collection process for each spectral class.</p>
</sec>
<sec id="s2-3">
<title>2.3 Orthomosaic registration</title>
<p>Due to the low accuracy of embedded camera GPS information, further manual co-registration was necessary to align the time series of orthomosaics (<xref ref-type="bibr" rid="B73">Tsai and Lin, 2017</xref>). This step was performed in the ENVI 5.0 software. We chose as reference image the best image in the series in terms of brightness and shading, as well as the closest alignment to Google Maps&#x2122; aerial imagery, which corresponded to the image acquired on 2016-09-25.</p>
<p>We then selected fifty Ground Control Points (GCPs), between the reference image and each image to be co-registered, and then, applied a second-order polynomial transformation with nearest neighbor pixel resampling, using a standardized 5&#xa0;cm/pixel grid. Estimating post-registration accuracy, we selected further GCPs between the reference image and a subsample of the imaged dates and then calculated horizontal, vertical, and Euclidean distance displacement between each image pair. The chosen dates for positional validation were 2016-02-23, 2016-05-22, 2016-08-16, and 2016-11-30.</p>
</sec>
<sec id="s2-4">
<title>2.4 Image classification</title>
<p>From the available drone image time series, we selected two dates, 2016-09-25 (dry season) and 2017-01-05 (wet season), which represented different phenological stages of the vegetation (<xref ref-type="bibr" rid="B36">Le Stradic et al., 2018</xref>). We then classified the two images separately, as well as combined the stack of the two images.</p>
<p>The classification approach followed the usual framework of GEOBIA methods, composed of image object generation, feature extraction, and object classification (<xref ref-type="bibr" rid="B10">Blaschke, 2010</xref>). We generated image objects using the Shepherd image segmentation algorithm implemented on free and open-source Remote Sensing and GIS Software Library (RSGISLib), based on K-means clustering and accessible through the Python programming language (<xref ref-type="bibr" rid="B12">Bunting, Clewley, and Lucas, 2014</xref>). This algorithm takes the following main parameters: NumClusters, MinPxls, DistThres, Sampling, and KmMaxIter (<xref ref-type="table" rid="T2">Table 2</xref>).</p>
<table-wrap id="T2" position="float">
<label>TABLE 2</label>
<caption>
<p>Description of the parameters used in the RSGISLib software for segmenting drone images from Serra do Cip&#xf3; (MG, Brazil) to support rupestrian grassland vegetation type classification.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="left">Parameter</th>
<th align="left">Meaning and description</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="left">
<italic>NumClusters</italic>
</td>
<td align="left">Number of K-means clusters&#x2014;controls the general homogeneity and thus the number of resulting objects. More clusters &#x3d; more homogeneity, more objects</td>
</tr>
<tr>
<td align="left">
<italic>MinPxls</italic>
</td>
<td align="left">Minimum size of the resulting objects</td>
</tr>
<tr>
<td align="left">
<italic>DistThres</italic>
</td>
<td align="left">Distance threshold merging neighboring similar clusters</td>
</tr>
<tr>
<td align="left">
<italic>Sampling</italic>
</td>
<td align="left">Number of pixels sampled for determining (K-means) clusters</td>
</tr>
<tr>
<td align="left">
<italic>KmMaxIter</italic>
</td>
<td align="left">Maximum number of iterations for K-means clustering</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>We tested ten different combinations of these parameters (<xref ref-type="table" rid="T3">Table 3</xref>) to determine the one that produced the best balance between the number of resulting objects vs. object homogeneity. This assessment was done visually by inspecting the resulting objects overlaid on the base segmentation image, as usual for GEOBIA applications.</p>
<table-wrap id="T3" position="float">
<label>TABLE 3</label>
<caption>
<p>Combinations of image segmentation parameters (NumClusters, MinPxls, DistThres, Sampling, and KmMaxIter, see <xref ref-type="table" rid="T1">Table 1</xref>) for generating image objects to classify rupestrian grassland vegetation types on UAV images from Serra do Cip&#xf3; (MG, Brazil).</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">ID</th>
<th align="center">NumClusters</th>
<th align="center">MinPxls</th>
<th align="center">DistThres</th>
<th align="center">Sampling</th>
<th align="center">KmMaxIter</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<italic>1</italic>
</td>
<td align="center">20</td>
<td align="center">100</td>
<td align="center">1,000</td>
<td align="center">100</td>
<td align="center">200</td>
</tr>
<tr>
<td align="center">
<italic>2</italic>
</td>
<td align="center">10</td>
<td align="center">50</td>
<td align="center">1,000</td>
<td align="center">500</td>
<td align="center">100</td>
</tr>
<tr>
<td align="center">
<italic>3</italic>
</td>
<td align="center">5</td>
<td align="center">500</td>
<td align="center">5,000</td>
<td align="center">200</td>
<td align="center">100</td>
</tr>
<tr>
<td align="center">
<italic>4</italic>
</td>
<td align="center">20</td>
<td align="center">400</td>
<td align="center">4,000</td>
<td align="center">300</td>
<td align="center">200</td>
</tr>
<tr>
<td align="center">
<italic>5</italic>
</td>
<td align="center">30</td>
<td align="center">30</td>
<td align="center">5,000</td>
<td align="center">100</td>
<td align="center">200</td>
</tr>
<tr>
<td align="center">
<italic>6</italic>
</td>
<td align="center">10</td>
<td align="center">500</td>
<td align="center">5,000</td>
<td align="center">100</td>
<td align="center">200</td>
</tr>
<tr>
<td align="center">
<italic>7</italic>
</td>
<td align="center">30</td>
<td align="center">400</td>
<td align="center">5,000</td>
<td align="center">100</td>
<td align="center">200</td>
</tr>
<tr>
<td align="center">
<italic>8</italic>
</td>
<td align="center">25</td>
<td align="center">450</td>
<td align="center">3,000</td>
<td align="center">100</td>
<td align="center">200</td>
</tr>
<tr>
<td align="center">
<italic>9</italic>
</td>
<td align="center">15</td>
<td align="center">450</td>
<td align="center">5,000</td>
<td align="center">100</td>
<td align="center">200</td>
</tr>
<tr>
<td align="center">
<italic>10</italic>
</td>
<td align="center">20</td>
<td align="center">600</td>
<td align="center">5,000</td>
<td align="center">100</td>
<td align="center">200</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Our classification key was comprised of nine spectral classes: &#x201c;Sandy Grassland&#x201d;, &#x201c;Stony Grassland&#x201d;, &#x201c;Wet Grassland&#x201d;, &#x201c;Rocky Outcrop&#x201d;, &#x201c;Cerrado Scrubland&#x201d;, and &#x201c;Riparian Forest&#x201d; vegetation types (<xref ref-type="bibr" rid="B48">Morellato and Silveira, 2018</xref>; <xref ref-type="bibr" rid="B44">Mattos et al., 2019</xref>), as well as &#x201c;Bare Soil&#x201d;, &#x201c;Bare Rocks&#x201d; and &#x201c;Water Body&#x201d;, non-vegetation cover (<xref ref-type="table" rid="T4">Table 4</xref>). We defined class training samples by delineating polygons over the UAS images, guided by a subsample of 70% of the ground observation, while the remaining 30% were destined for accuracy assessment.</p>
<table-wrap id="T4" position="float">
<label>TABLE 4</label>
<caption>
<p>Spectral classes used for drone image classification of rupestrian grasslands at Serra do Cip&#xf3; (MG, Brazil).</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Spectral class</th>
<th align="center">Characteristics</th>
<th align="center">Photo</th>
<th align="center">Number of samples collected</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<italic>Bare soil</italic>
</td>
<td align="center">Roads and highways in general</td>
<td align="center">
<inline-graphic xlink:href="FENVS_fenvs-2023-1083328_wc_tfx1.tif"/>
</td>
<td align="left">110</td>
</tr>
<tr>
<td align="center">
<italic>Bare rock</italic>
</td>
<td align="center">Surface with complete absence of vegetation</td>
<td align="center">
<inline-graphic xlink:href="FENVS_fenvs-2023-1083328_wc_tfx2.tif"/>
</td>
<td align="left">150</td>
</tr>
<tr>
<td align="center">
<italic>Riparian forest</italic>
</td>
<td align="center">Forested formations along rivers and streams</td>
<td align="center">
<inline-graphic xlink:href="FENVS_fenvs-2023-1083328_wc_tfx3.tif"/>
</td>
<td align="left">224</td>
</tr>
<tr>
<td align="center">
<italic>Water body</italic>
</td>
<td align="center">Rivers and streams</td>
<td align="center">
<inline-graphic xlink:href="FENVS_fenvs-2023-1083328_wc_tfx4.tif"/>
</td>
<td align="left">90</td>
</tr>
<tr>
<td align="center">
<italic>Rocky outcrops</italic>
</td>
<td align="center">Rocky outcrops with significant and characteristic vegetation cover</td>
<td align="center">
<inline-graphic xlink:href="FENVS_fenvs-2023-1083328_wc_tfx5.tif"/>
</td>
<td align="left">441</td>
</tr>
<tr>
<td align="center">
<italic>Cerrado scrubland</italic>
</td>
<td align="center">Herbaceous/shrub formations with sparse bushes, found on shallow soils of low fertility</td>
<td align="center">
<inline-graphic xlink:href="FENVS_fenvs-2023-1083328_wc_tfx6.tif"/>
</td>
<td align="left">165</td>
</tr>
<tr>
<td align="center">
<italic>Wet grassland</italic>
</td>
<td align="center">Herbaceous/shrub formations that occur in areas with groundwater upwelling or depressions that accumulate water during the rainy season</td>
<td align="center">
<inline-graphic xlink:href="FENVS_fenvs-2023-1083328_wc_tfx7.tif"/>
</td>
<td align="left">214</td>
</tr>
<tr>
<td align="center">
<italic>Stony grassland</italic>
</td>
<td align="center">Herbaceous formations, with rare bushes, a complete absence of trees, and the presence of stony/pebble substratum</td>
<td align="center">
<inline-graphic xlink:href="FENVS_fenvs-2023-1083328_wc_tfx8.tif"/>
</td>
<td align="left">184</td>
</tr>
<tr>
<td align="center">
<italic>Sandy grassland</italic>
</td>
<td align="center">Herbaceous formations growing on dry and nutrient-poor sandy soils with high porosity, permeability, and erosion susceptibility</td>
<td align="center">
<inline-graphic xlink:href="FENVS_fenvs-2023-1083328_wc_tfx9.tif"/>
</td>
<td align="left">292</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Image classification was applied using the Random Forest approach, a machine learning algorithm from the Sci-Kit Learn Python library (<xref ref-type="bibr" rid="B57">Pedregosa et al., 2011</xref>), accessible in the RSGISLib library (<ext-link ext-link-type="uri" xlink:href="https://www.rsgislib.org/">https://www.rsgislib.org/</ext-link>). This approach can be applied to quickly identify best-performing features and build a robust classification model (<xref ref-type="bibr" rid="B62">Rodriguez-Galiano et al., 2012</xref>). The main parameters of the Random Forest algorithm are <italic>n-estimators</italic> and <italic>max-features</italic>, the first referring to the number of decision trees generated by the classifier, and the second representing the maximum number of randomly chosen attributes considered by the algorithm when creating nodes on each decision tree. We tested three combinations of parameters, varying in the number of decision trees: 5, 200, and 500 numbers of decision trees.</p>
<p>Assessing the resulting accuracy of each classification, we computed the confusion matrix, general accuracy, and Kappa index of agreement (<xref ref-type="bibr" rid="B18">Congalton, 1991</xref>) for each classification, based on the reference data, using the custom &#x2018;<italic>rsacc</italic>&#x2019; package developed in R (<ext-link ext-link-type="uri" xlink:href="https://github.com/EcoDyn/rsacc">https://github.com/EcoDyn/rsacc</ext-link>).</p>
<p>The overview of the methodological procedures can be visualized in <xref ref-type="fig" rid="F2">Figure 2</xref>.</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption>
<p>Flowchart with methodological procedures performed in the research, divided into four main steps: [1] Drone imagery (pink); [2] Field data collection (yellow); [3] Orthomosaic registration (green), and [4] Image classification (blue).</p>
</caption>
<graphic xlink:href="fenvs-11-1083328-g002.tif"/>
</fig>
</sec>
</sec>
<sec sec-type="results" id="s3">
<title>3 Results</title>
<p>The results of orthomosaic registration revealed the importance of complete alignment of the time series to obtain concrete, effective, and accurate results in the classification process. <xref ref-type="fig" rid="F3">Figure 3</xref> shows the comparison between the Euclidean distance displacement before and after the registration process, revealing the improvement in the quality and accuracy of images generated by UAS.</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption>
<p>Results of orthomosaic registration for four dates of the temporal series images from rupestrian grasslands (Serra do Cip&#xf3;, MG), comparing the Euclidean distance displacement before and after registration processes. Drone flight image dates from <bold>(A)</bold> 23 of February 2016; <bold>(B)</bold> 22 of May 2016; <bold>(C)</bold> 16 of August 2016; <bold>(D)</bold> 30 of November 2016.</p>
</caption>
<graphic xlink:href="fenvs-11-1083328-g003.tif"/>
</fig>
<p>Regarding the segmentation process, according to enumeration in <xref ref-type="table" rid="T3">Table 3</xref>, segmentation 10 showed the best parameter combination, producing results that properly covered the homogeneity areas of the image. We used photointerpretation and visual techniques to choose the best parameter combination.</p>
<p>In addition, concerning the classification processes, all values extracted during the validation process performed in R (KAPPA index, overall accuracy, and overall error) are shown in <xref ref-type="table" rid="T5">Table 5</xref>. The classification for the date 25 of September 2016 received 0.987 for KAPPA Index and 0.989 for general accuracy. Moreover, the classification for the date 05 of January 2017 received 0.985 for KAPPA Index and 0.988 for general accuracy. Finally, the classification using the stack with two dates received 0.969 for KAPPA Index and 0.974 for general accuracy.</p>
<table-wrap id="T5" position="float">
<label>TABLE 5</label>
<caption>
<p>Results of all classification performed using the Random Forests algorithm.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">ID</th>
<th align="center">Date</th>
<th align="center">KAPPA index</th>
<th align="center">Overall accuracy</th>
<th align="center">Overall error</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<italic>Map 1</italic>
</td>
<td align="center">Sep/25/2016</td>
<td align="center">0.987</td>
<td align="center">0.989</td>
<td align="center">0.011</td>
</tr>
<tr>
<td align="center">
<italic>Map 2</italic>
</td>
<td align="center">Jan/05/2017</td>
<td align="center">0.985</td>
<td align="center">0.988</td>
<td align="center">0.012</td>
</tr>
<tr>
<td align="center">
<italic>Map 3</italic>
</td>
<td align="center">Stack</td>
<td align="center">0.969</td>
<td align="center">0.974</td>
<td align="center">0.026</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>The processes using machine learning techniques require attention to their parameter choice. In the case of Random Forests, the main parameter tested is <italic>n-estimators</italic>, referring to the number of decision trees generated by the classifier. We tested three different numbers of decision trees, as described in Materials and Methods, and 500 trees were chosen as the best number for classifying the rupestrian grassland.</p>
<p>In general, a greater number of decision trees means a greater number of analyses that the algorithm will perform to arrive at a result. This number of decision trees was chosen due to the peculiarities present in the type of image (hyperspatial resolution) and in the type of vegetation (rupestrian grassland with high vegetation heterogeneity). In this sense, for effective classification of the rupestrian grassland, more decision trees were needed. Thus, all data was processed using 500 decision trees and these results are presented in <xref ref-type="fig" rid="F4">Figure 4</xref>.</p>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption>
<p>Classification of rupestrian grassland Serra do Cip&#xf3; (MG, Brazil): <bold>(A)</bold> drone flight image from 25 September 2016; <bold>(B)</bold> drone flight image from 05 January 2017; <bold>(C)</bold> stack with two drone flight dates: 25 September 2016 (dry season) and 05 January 2017 (wet season); <bold>(D)</bold> original orthomosaic arising from SfM algorithm; <bold>(E)</bold> zoom of a specific portion of the original orthomosaic; <bold>(F)</bold> zoom of a specific portion of the drone flight classified image from 25 September 2016; <bold>(G)</bold> zoom of a specific portion of the drone flight classified image from 05 January 2017; <bold>(H)</bold> zoom of a specific portion of the stack classified image.</p>
</caption>
<graphic xlink:href="fenvs-11-1083328-g004.tif"/>
</fig>
<p>We classified each image individually and, also, the stack image (a merge of the images dated 25 September 2016 and 05 January 2017). According to <xref ref-type="table" rid="T6">Table 6</xref>, which shows the proportion of each spectral class according to the total area, in each classification performed, the Rocky Outcrop class received the highest percentage of area classified (25.398% in September 2016, 24.107% in January 2017 and 25.114% in stack), followed by Sandy Grassland (20.806% in September 2016, 21.108% in January 2017 and 18.689% in stack). The lowest percentage of classified area corresponded to Bare Soils (1.243% in September 2016, 0.994% in January 2017 and 1.408% in stack), followed by Bare Rocks (1.924% in September 2016, 2.431% in January 2017 and 2.325% in stack). The stacking process helped the algorithm to properly classify the spectral classes present in rupestrian grassland according to reality, furthering the capture of different phenological stages and patterns of each vegetation type.</p>
<table-wrap id="T6" position="float">
<label>TABLE 6</label>
<caption>
<p>Percentage of each spectral class identified in classification maps (25 September 2016, 05 January 2017 and the stack).</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center">Class</th>
<th align="center">2016</th>
<th align="center">2017</th>
<th align="center">Stack</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<italic>1: Rocky outcrop</italic>
</td>
<td align="center">25.398</td>
<td align="center">24.107</td>
<td align="center">25.114</td>
</tr>
<tr>
<td align="center">
<italic>2: Sandy grassland</italic>
</td>
<td align="center">20.806</td>
<td align="center">21.108</td>
<td align="center">18.689</td>
</tr>
<tr>
<td align="center">
<italic>3: Stony grassland</italic>
</td>
<td align="center">13.873</td>
<td align="center">16.151</td>
<td align="center">14.062</td>
</tr>
<tr>
<td align="center">
<italic>4: Cerrado scrubland</italic>
</td>
<td align="center">13.395</td>
<td align="center">11.774</td>
<td align="center">13.946</td>
</tr>
<tr>
<td align="center">
<italic>5: Wet grassland</italic>
</td>
<td align="center">6.898</td>
<td align="center">6.255</td>
<td align="center">7.094</td>
</tr>
<tr>
<td align="center">
<italic>6: Water body</italic>
</td>
<td align="center">2.382</td>
<td align="center">1.303</td>
<td align="center">2.381</td>
</tr>
<tr>
<td align="center">
<italic>7: Riparian forest</italic>
</td>
<td align="center">14.080</td>
<td align="center">14.039</td>
<td align="center">14.981</td>
</tr>
<tr>
<td align="center">
<italic>8: Bare rocks</italic>
</td>
<td align="center">1.924</td>
<td align="center">2.431</td>
<td align="center">2.325</td>
</tr>
<tr>
<td align="center">
<italic>9: Bare soils</italic>
</td>
<td align="center">1.243</td>
<td align="center">0.994</td>
<td align="center">1.408</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>In <xref ref-type="table" rid="T7">Tables 7</xref>&#x2013;<xref ref-type="table" rid="T9">9</xref> we present the results of the analyses performed with the error matrix of each classification process. An error matrix is a square array of numbers set out in rows and columns, in which the columns represent the reference data (validation data) while the rows represent the classification generated from the UAS data. The major diagonal of the matrix indicates the percent of pixels classified correctly, while the off-diagonal cell values represent the percent of pixels classified incorrectly in each spectral class.</p>
<table-wrap id="T7" position="float">
<label>TABLE 7</label>
<caption>
<p>Error matrix derived from the classification performed with Random Forest algorithm, using drone flight from date 25 September 2016.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center"/>
<th align="center">1</th>
<th align="center">2</th>
<th align="center">3</th>
<th align="center">4</th>
<th align="center">5</th>
<th align="center">6</th>
<th align="center">7</th>
<th align="center">8</th>
<th align="center">9</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<italic>1: Rocky outcrop</italic>
</td>
<td align="center">
<bold>9.620</bold>
</td>
<td align="center">0</td>
<td align="center">0.001</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.004</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>2: Sandy grassland</italic>
</td>
<td align="center">0.012</td>
<td align="center">
<bold>18.380</bold>
</td>
<td align="center">0.029</td>
<td align="center">0.120</td>
<td align="center">0.302</td>
<td align="center">0</td>
<td align="center">0.001</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>3: Stony grassland</italic>
</td>
<td align="center">0</td>
<td align="center">0.007</td>
<td align="center">
<bold>16.270</bold>
</td>
<td align="center">0.035</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>4: Cerrado scrubland</italic>
</td>
<td align="center">0.005</td>
<td align="center">0.081</td>
<td align="center">0.003</td>
<td align="center">
<bold>9.060</bold>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.009</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>5: Wet grassland</italic>
</td>
<td align="center">0</td>
<td align="center">0.322</td>
<td align="center">0</td>
<td align="center">0.034</td>
<td align="center">
<bold>10.090</bold>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>6: Water body</italic>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">
<bold>5.170</bold>
</td>
<td align="center">0.002</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>7: Riparian forest</italic>
</td>
<td align="center">0.003</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.008</td>
<td align="center">
<bold>26.970</bold>
</td>
<td align="center">0.047</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>8: Bare rocks</italic>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.021</td>
<td align="center">
<bold>2.270</bold>
</td>
<td align="center">0.025</td>
</tr>
<tr>
<td align="center">
<italic>9: Bare soils</italic>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.019</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">
<bold>2.160</bold>
</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="T8" position="float">
<label>TABLE 8</label>
<caption>
<p>Error matrix derived from the classification performed with Random Forest algorithm, using drone flight from date 05 January 2017.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center"/>
<th align="center">1</th>
<th align="center">2</th>
<th align="center">3</th>
<th align="center">4</th>
<th align="center">5</th>
<th align="center">6</th>
<th align="center">7</th>
<th align="center">8</th>
<th align="center">9</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<italic>1: Rocky outcrop</italic>
</td>
<td align="center">
<bold>10.110</bold>
</td>
<td align="center">0</td>
<td align="center">0.005</td>
<td align="center">0</td>
<td align="center">0.001</td>
<td align="center">0</td>
<td align="center">0.005</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>2: Sandy grassland</italic>
</td>
<td align="center">0</td>
<td align="center">
<bold>19.360</bold>
</td>
<td align="center">0.009</td>
<td align="center">0.027</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>3: Stony grassland</italic>
</td>
<td align="center">0.003</td>
<td align="center">0.046</td>
<td align="center">
<bold>17.140</bold>
</td>
<td align="center">0.005</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.05</td>
<td align="center">0</td>
<td align="center">0.493</td>
</tr>
<tr>
<td align="center">
<italic>4: Cerrado scrubland</italic>
</td>
<td align="center">0.021</td>
<td align="center">0.07</td>
<td align="center">0.001</td>
<td align="center">
<bold>9.550</bold>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.011</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>5: Wet grassland</italic>
</td>
<td align="center">0</td>
<td align="center">0.283</td>
<td align="center">0</td>
<td align="center">0.089</td>
<td align="center">
<bold>10.930</bold>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>6: Water body</italic>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">
<bold>3.050</bold>
</td>
<td align="center">0.013</td>
<td align="center">0.004</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>7: Riparian forest</italic>
</td>
<td align="center">0.004</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.01</td>
<td align="center">
<bold>25.620</bold>
</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>8: Bare rocks</italic>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.019</td>
<td align="center">
<bold>2.420</bold>
</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>9: Bare soils</italic>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.053</td>
<td align="center">0</td>
<td align="center">0.009</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">
<bold>1.810</bold>
</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="T9" position="float">
<label>TABLE 9</label>
<caption>
<p>Error matrix derived from the classification performed with Random Forest algorithm. Stack with two drone flight image dates: 25 September 2016 and 05 January 2017.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="center"/>
<th align="center">1</th>
<th align="center">2</th>
<th align="center">3</th>
<th align="center">4</th>
<th align="center">5</th>
<th align="center">6</th>
<th align="center">7</th>
<th align="center">8</th>
<th align="center">9</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="center">
<italic>1: Rocky outcrop</italic>
</td>
<td align="center">
<bold>9.750</bold>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.002</td>
<td align="center">0</td>
<td align="center">0.007</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>2: Sandy grassland</italic>
</td>
<td align="center">0.012</td>
<td align="center">
<bold>17.601</bold>
</td>
<td align="center">0.024</td>
<td align="center">0.110</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>3: Stony grassland</italic>
</td>
<td align="center">0</td>
<td align="center">0.005</td>
<td align="center">
<bold>16.030</bold>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>4: Cerrado scrubland</italic>
</td>
<td align="center">0.024</td>
<td align="center">0.055</td>
<td align="center">0.003</td>
<td align="center">
<bold>9.190</bold>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.009</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>5: Wet grassland</italic>
</td>
<td align="center">0.011</td>
<td align="center">1.410</td>
<td align="center">0</td>
<td align="center">0.082</td>
<td align="center">
<bold>10.550</bold>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>6: Water body</italic>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">
<bold>5.088</bold>
</td>
<td align="center">0.105</td>
<td align="center">0.044</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>7: Riparian forest</italic>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.011</td>
<td align="center">0</td>
<td align="center">0.175</td>
<td align="center">
<bold>27.270</bold>
</td>
<td align="center">0</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>8: Bare rocks</italic>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.023</td>
<td align="center">
<bold>2.290</bold>
</td>
<td align="center">0</td>
</tr>
<tr>
<td align="center">
<italic>9: Bare soils</italic>
</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.490</td>
<td align="center">0.011</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0</td>
<td align="center">0.012</td>
<td align="center">
<bold>2.220</bold>
</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>In all results, we detected an uncertainty between two spectral classes: sandy grassland and wet grassland. In <xref ref-type="table" rid="T7">Table 7</xref>, 18.380% of pixels of sandy grassland were classified correctly, but 0.322% of these pixels were misclassified as wet grassland. In <xref ref-type="table" rid="T8">Table 8</xref>, 19.360% of pixels were classified correctly and 0.283% misclassified. In <xref ref-type="table" rid="T9">Table 9</xref>, 17.601% were classified correctly and 1.410% misclassified.</p>
<p>The other two relevant analyses performed in the error matrix are presented in <xref ref-type="table" rid="T8">Table 8</xref>, where 0.493% of pixels of bare soils were misclassified as stony grassland. Also, in <xref ref-type="table" rid="T9">Table 9</xref>, 0.490% of pixels of stony grassland were misclassified as bare soils. This analysis showed some uncertainty between bare soils and stony grassland.</p>
</sec>
<sec sec-type="discussion" id="s4">
<title>4 Discussion</title>
<p>Multi-temporal UAS images associated with RGB cameras, and the machine learning methods applied in our study produced consistent results, as far as the ecology analysis is concerned, and are adequate for mapping vegetation, at least at the life-form level (i.e., tree, shrub, and herbaceous species). The combination of the OBIA method and the Random Forest classifier reduced the misclassified pixels and the effect known as &#x2018;salt and pepper&#x2019;, generating extremely high general accuracy and kappa index. In general, machine learning approaches have high potential to capture the non-linear relationship between remote sensing data and vegetation parameters and have the capability of integrating multisource information at different levels (<xref ref-type="bibr" rid="B84">Yao, Qin, and Chen, 2019</xref>). In addition, the results acquired confirmed the importance of using machine learning algorithms in remote sensing vegetation classification, due to their powerful adaptation, self-learning, and parallel processing capabilities (<xref ref-type="bibr" rid="B49">Navin and Agilandeeswari, 2020</xref>; <xref ref-type="bibr" rid="B46">Meng et al., 2021</xref>; <xref ref-type="bibr" rid="B47">2022</xref>).</p>
<p>Therefore, some challenges need to be emphasized and both the hardware and software of UAS remote sensing technology require improvements: [1] The endurance of UAS is relatively limited, the flight stability is not strong enough in areas with large terrain fluctuation and the lack of flight altitude limits the image size; [2] Although more lightweight and smaller sensor systems have become available, such as hyperspectral and LiDAR sensors, they are still expensive; [3] The integration between UAS platforms and sensors requires improvement, e.g., most of the multispectral, hyperspectral, and thermal sensors are built independent of the UAV platform, so they need an extra GPS module and, also, UAS are often equipped with a single sensor, multisensor integration is beneficial to improve monitoring accuracy and efficiency; [4] The mosaic workload is significantly higher than satellite imagery, which takes up more time for image processing, resulting in the need to develop more robust algorithms, like deep learning techniques, in addition, the technology of mass data processing needs to be improved due to the richness and variety of data obtained; [5] Most vegetation classifications <italic>via</italic> UAS require human participation and interpretation, indicating that the combination of UAS remote sensing with ground data and satellite data needs to be strengthened; if the dataset used for training is extensive, computer learning techniques would generate a satisfactory classification outcome; [6] The use of UAS images to monitor tropical savannas leaf phenology is a challenge due to the difficulty of extracting accurate quantitative phenology estimates under variable lighting and viewing conditions; [7] The application scenarios of UAS remote sensing in grassland ecosystem monitoring need to be expanded and deepened, mainly in animal investigation and soil physical and chemical monitoring; and also, the correlation between the scientific research of UAS 
remote sensing monitoring and practical decision making of grassland management is still insufficient (<xref ref-type="bibr" rid="B50">Neumann et al., 2019</xref>; <xref ref-type="bibr" rid="B55">Park et al., 2019</xref>; <xref ref-type="bibr" rid="B41">Lyu et al., 2020</xref>; <xref ref-type="bibr" rid="B40">2022</xref>; <xref ref-type="bibr" rid="B71">Sun et al., 2021</xref>).</p>
<p>The hyperspatial imagery, like UAS technology, associated with machine learning classification techniques enabled the identification of even fine grassland vegetation types such as wet, stony, and sandy grasslands. However, some adjustments were necessary for improving the accuracy of orthomosaic classification, such as [1] precise sensor position; [2] orientation data, and [3] several GCPs during the orthomosaic registration process (<xref ref-type="bibr" rid="B73">Tsai and Lin, 2017</xref>).</p>
<sec id="s4-1">
<title>4.1 Grassland classification and their relationship with vegetation phenophases</title>
<p>Identifying the grassland classes is crucial for managing and utilizing grassland resources and for reconstructing and restoring the grassland ecological environment (<xref ref-type="bibr" rid="B47">Meng et al., 2022</xref>). To further understand the vegetation distribution across a tropical mountaintop landscape, and associate this with leafing dynamics, we quantified the Land Use and Land Cover patterns at the vegetation level using the OBIA method and the Random Forests classifier. Using hyperspatial imagery through UAS remote sensing technology, it was possible to identify vegetation phenophases. Phenophases, from phenology, are defined as an observable stage or phase in the annual life cycle of a plant or animal and are considered as the period over which newly emerging leaves are visible or open flowers are present on a plant or community (<xref ref-type="bibr" rid="B13">Camargo et al., 2018</xref>). Abiotic factors, mainly water and light availability, are primary drivers of plant phenology, and are important to predict vegetation changes over time (<xref ref-type="bibr" rid="B77">Walker and Noy-Meir, 1982</xref>; <xref ref-type="bibr" rid="B3">Alberton et al., 2019</xref>), for instance, water availability regulates the length of the growing season and the phenological synchrony among savanna species (<xref ref-type="bibr" rid="B13">Camargo et al., 2018</xref>; <xref ref-type="bibr" rid="B3">Alberton et al., 2019</xref>).</p>
<p>Generally, tropical savannas present marked seasonality (<xref ref-type="bibr" rid="B77">Walker and Noy-Meir, 1982</xref>), with a conspicuous contrast between the dry and rainy seasons (<xref ref-type="bibr" rid="B27">Ferreira et al., 2003</xref>; <xref ref-type="bibr" rid="B13">Camargo et al., 2018</xref>; <xref ref-type="bibr" rid="B3">Alberton et al., 2019</xref>), and tropical species present high heterogeneity of phenological patterns (<xref ref-type="bibr" rid="B13">Camargo et al., 2018</xref>; <xref ref-type="bibr" rid="B55">Park et al., 2019</xref>). Leafing is the plant phenological event that defines the growth season and controls crucial ecosystem processes (<xref ref-type="bibr" rid="B2">Alberton et al., 2014</xref>; <xref ref-type="bibr" rid="B3">2019</xref>), in our case, the image dated 25 September 2016 (end of dry season) revealed the beginning of springtime, showing the timing of the budburst of individual trees and coinciding with the beginning of tree canopy and grassland greenness (<xref ref-type="bibr" rid="B69">Streher et al., 2017</xref>), which is responsible for causing a change in leaf color (<xref ref-type="bibr" rid="B97">Alberton et al., 2017</xref>; <xref ref-type="bibr" rid="B3">2019</xref>; <xref ref-type="bibr" rid="B14">Camargo et al., 2014</xref>; <xref ref-type="bibr" rid="B13">2018</xref>), consequently, resulting in a differentiation visible in the orthophoto. 
Otherwise, the image dated 5 January 2017 represents the end of canopy development in the wet season, indicating the tree&#x2019;s mature leaf stages, the leaf aging process, and flowering/fruiting across the grasslands (<xref ref-type="bibr" rid="B97">Alberton et al., 2017</xref>; <xref ref-type="bibr" rid="B3">Alberton et al., 2019</xref>; <xref ref-type="bibr" rid="B69">Streher et al., 2017</xref>; <xref ref-type="bibr" rid="B36">Le Stradic et al., 2018</xref>; <xref ref-type="bibr" rid="B14">Camargo et al., 2014</xref>; <xref ref-type="bibr" rid="B13">2018</xref>). This process causes a new change in the color patterns identified in the orthophoto. Our results resemble the sharp seasonal changes or spring flushing and abscission in autumn described by <xref ref-type="bibr" rid="B34">Klosterman and Richardson (2017)</xref>. In addition, the stack method helped to capture these minor differences in vegetation phenophases, mainly due to the differentiation in the color composition of the orthophoto.</p>
<p>Detailed phenophase delineations are limited in conventional satellite imagery, with spatial resolutions in order to meters/kilometers. Conversely, fine-scale phenological variations are mainly driven by individual species distributions and cannot be mapped by satellite imagery (<xref ref-type="bibr" rid="B33">Klosterman et al., 2018</xref>; <xref ref-type="bibr" rid="B50">Neumann et al., 2019</xref>). In general, ecologists use near-surface remote sensing, such as tower-mounted cameras, called &#x201c;phenocams&#x201d;, to quantify tropical leaf phenology, but the problem with this method is the limited area covered by phenocams, providing insufficient sample sizes for studying intra and interspecific variation of leaf phenology (<xref ref-type="bibr" rid="B55">Park et al., 2019</xref>). In this sense, UAS remote sensing techniques open up promising potentials for detailed ecosystem studies, and for the first time in ecology and remote sensing research history, the structure of ecosystems can be quantified from individual plants down to the leaf scale (<xref ref-type="bibr" rid="B21">D&#xed;az-Delgado, Cazacu, and Adamescu, 2019</xref>).</p>
<p>Plant phenology has been recognized as an ecological key indicator of ecosystem dynamics and represents an important manifestation of the temporal change of growth and reproduction in the plant life-cycle (<xref ref-type="bibr" rid="B96">Morellato et al., 2016</xref>). The association of plant phenology-mapping may act as a key component for monitoring vegetation dynamics, management practices and ecological restoration in applied nature conservation, and is regarded as fingerprints of climate change and biodiversity loss (<xref ref-type="bibr" rid="B50">Neumann et al., 2019</xref>; <xref ref-type="bibr" rid="B55">Park et al., 2019</xref>).</p>
<p>In summary, data collection and processing of UAS remote sensing is new and complicated for most ecologists and remote sensing researchers but opens new possibilities in plant ecology by addressing classical ecological questions at different ecological scales, individual, population, or community levels. Besides, some studies demonstrated that object-based classification approaches can successfully classify ultrahigh spatial resolution imagery, but the choice between what methods are better does not follow rigid rules and depends on the aims of your study and on several characteristics of the available data and the study area, such as vegetation type and phenology, land cover heterogeneity and imagery features (<xref ref-type="bibr" rid="B42">Ma et al., 2017</xref>; <xref ref-type="bibr" rid="B63">Ruwaimana et al., 2018</xref>).</p>
</sec>
</sec>
<sec sec-type="conclusion" id="s5">
<title>5 Conclusion</title>
<p>This study examined the use of Unmanned Aerial Systems remote sensing technology associated with machine learning techniques (OBIA method and Random Forest algorithm) to classify and understand the vegetation distribution across a grassland landscape. Through the results obtained with the validation process, all classifications performed received general accuracy and KAPPA index above 0.96, indicating a high efficiency of machine learning techniques to process the ultrahigh spatial resolution data generated by UAS images. Using the UAS to explore and analyze vegetation phenomena is becoming a new efficient remote sensing technique, providing vegetation recognition, the extraction of structural and functional properties of ecosystems and the quantification from individual plants down to the leaf scale. A big challenge of applying a robust algorithm is the choice of the best combination of parameters for proposing a high quality of vegetation classification. Our results demonstrated that the best combination of parameters for the segmentation process was 20 for the number of K-means clusters, which controls the general homogeneity and the number of resulting clusters, and 600 pixels for the minimum size of the resulting cluster, which produced results that properly covered the homogeneity areas of the image. In addition, for the Random Forest classifier the number of decision trees chosen was 500, where a greater number of decision trees means a greater number of analyses that the algorithm will perform to arrive at a result.</p>
<p>In recent years, machine learning techniques associated with ultrahigh spatial resolution imagery have been widely used in grassland classification due to their high accuracy and powerful processing ability, however, there is still a set of challenges and limitations, such as being highly time-consuming, because processing this big dataset requires a great computational configuration, making it impossible to process the data with conventional techniques and traditional GIS tools; therefore, automatic identification of vegetation classes requires further exploration. In future studies, we suggest exploring the method of combining machine learning algorithms, multitype indices (NDVI, DEM, temperature, precipitation, and so on) or multisensor integration to improve the accuracy of grassland classification. Considering future directions, the UAS remote sensing technology has been rapidly improved and developed along with precision and intelligence; the endurance, stability and flight height and other performance parameters of UAV platforms will be significantly improved; the cost of sensors can be reduced; machine learning techniques will become an important technical means to provide a technical solution for automatic processing and analysis of massive monitoring data, and finally, the decision-making support of UAS remote sensing for grassland management will be enhanced.</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="s6">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec id="s7">
<title>Author contributions</title>
<p>Conceptualization, TM, TS, and LM; Methodology, TM, TS; Formal analysis, TM, TS; Field investigation, TS, LM; Data curation, LM, TS; Writing original draft, TM; Writing review and editing, TM, TS, and LM; Funding acquisition, project administration, LM.</p>
</sec>
<sec id="s8">
<title>Funding</title>
<p>This research was funded by the S&#x00E3;o Paulo Research Foundation FAPESP (grants FAPESP-Microsoft Research Institute #2013/50155-0, FAPESP grant #2009/54208-6), National Council for Scientific and Technological Development CNPq (#428055/2018-4), TM received a fellowship from FAPESP (#2019/03269-7) and fellowship from the CNPq (PIBIC), LM and TS received research productivity fellowships and grants from CNPq (#311820/2018-2).</p>
</sec>
<ack>
<p>We thank the BD Borges and LF Cancian for flying the drone in the field and pre-processing all drone imagery, and for technical support during fieldwork. We thank ICMBio for granting the permits needed to work at Serra do Cip&#xf3; National Park (PNSC) and its buffer zone, the Cedro Company for allowing access to their private land, and PELD-CRSC for logistic support. We are very thankful to our colleagues from the Phenology Lab and EcoDyn for their helpful insights and discussions.</p>
</ack>
<sec sec-type="COI-statement" id="s9">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="s10">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Adao</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Hruska</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Padua</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Bessa</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Peres</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Morais</surname>
<given-names>R.</given-names>
</name>
<etal/>
</person-group> (<year>2017</year>). <article-title>Hyperspectral imaging: A review on UAV-based sensors, data processing and applications for agriculture and forestry</article-title>. <source>Remote Sens.</source> <volume>9</volume> (<issue>11</issue>), <fpage>1110</fpage>. <pub-id pub-id-type="doi">10.3390/rs9111110</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Alberton</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Almeida</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Helm</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Torres</surname>
<given-names>R. S.</given-names>
</name>
<name>
<surname>Menzel</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Using phenological cameras to track the green up in a cerrado savanna and its on-the-ground validation</article-title>. <source>Ecol. Inf.</source> <volume>19</volume>, <fpage>62</fpage>&#x2013;<lpage>70</lpage>. <pub-id pub-id-type="doi">10.1016/j.ecoinf.2013.12.011</pub-id>
</citation>
</ref>
<ref id="B97">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Alberton</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Torres</surname>
<given-names>R. S.</given-names>
</name>
<name>
<surname>Cancian</surname>
<given-names>L. F.</given-names>
</name>
<name>
<surname>Borges</surname>
<given-names>B. D.</given-names>
</name>
<name>
<surname>Almeida</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Mariano</surname>
<given-names>G. C.</given-names>
</name>
<etal/>
</person-group> (<year>2017</year>). <article-title>Introducing digital cameras to monitor plant phenology in the tropics: Applications for conservation</article-title>. <source>Perspect. Ecol. Conserv.</source> <volume>12</volume> (<issue>2</issue>), <fpage>82</fpage>&#x2013;<lpage>90</lpage>. <pub-id pub-id-type="doi">10.1016/j.pecon.2017.06.004</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Alberton</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Torres</surname>
<given-names>R. S.</given-names>
</name>
<name>
<surname>Silva</surname>
<given-names>T. S. F.</given-names>
</name>
<name>
<surname>Rocha</surname>
<given-names>H. R.</given-names>
</name>
<name>
<surname>Moura</surname>
<given-names>M. S. B.</given-names>
</name>
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Leafing patterns and drivers across seasonally dry tropical communities</article-title>. <source>Remote Sens.</source> <volume>11</volume> (<issue>19</issue>), <fpage>2267</fpage>&#x2013;<lpage>67</lpage>. <pub-id pub-id-type="doi">10.3390/rs11192267</pub-id>
</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Almeida</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Pedronette</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Alberton</surname>
<given-names>B. C.</given-names>
</name>
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
<name>
<surname>Torres</surname>
<given-names>R. S.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Unsupervised distance learning for plant species identification</article-title>. <source>IEEE J. Sel. Top. Appl. Earth Observations Remote Sens.</source> <volume>9</volume> (<issue>12</issue>), <fpage>5325</fpage>&#x2013;<lpage>5338</lpage>. <pub-id pub-id-type="doi">10.1109/JSTARS.2016.2608358</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Almeida</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Santos</surname>
<given-names>J. A.</given-names>
</name>
<name>
<surname>Alberton</surname>
<given-names>B. C.</given-names>
</name>
<name>
<surname>Torres</surname>
<given-names>R. S.</given-names>
</name>
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Applying machine learning based on multiscale classifiers to detect remote phenology patterns in Cerrado savanna trees</article-title>. <source>Ecol. Inf.</source> <volume>23</volume>, <fpage>49</fpage>&#x2013;<lpage>61</lpage>. <pub-id pub-id-type="doi">10.1016/j.ecoinf.2013.06.011</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Anderson</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Gaston</surname>
<given-names>K. J.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Lightweight unmanned aerial vehicles will revolutionize spatial ecology</article-title>. <source>Front. Ecol. Environ.</source> <volume>11</volume> (<issue>3</issue>), <fpage>138</fpage>&#x2013;<lpage>146</lpage>. <pub-id pub-id-type="doi">10.1890/120150</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Baena</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Boyd</surname>
<given-names>D. S.</given-names>
</name>
<name>
<surname>Moat</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>UAVs in pursuit of plant conservation&#x2014;real world experiences</article-title>. <source>Ecol. Inf.</source> <volume>47</volume>, <fpage>2</fpage>&#x2013;<lpage>9</lpage>. <pub-id pub-id-type="doi">10.1016/j.ecoinf.2017.11.001</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Balasubramanian</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Zhou</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Ji</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Grace</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Bai</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Song</surname>
<given-names>Q.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Environmental and management controls of soil carbon storage in grasslands of southwestern China</article-title>. <source>J. Environ. Manag.</source> <volume>254</volume>, <fpage>109810</fpage>. <pub-id pub-id-type="doi">10.1016/j.jenvman.2019.109810</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Berra</surname>
<given-names>E. F.</given-names>
</name>
<name>
<surname>Gaulton</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Barr</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Assessing spring phenology of a temperate woodland: A multiscale comparison of ground, unmanned aerial vehicle, and landsat satellite observations</article-title>. <source>Remote Sens. Environ.</source> <volume>223</volume>, <fpage>229</fpage>&#x2013;<lpage>242</lpage>. <pub-id pub-id-type="doi">10.1016/j.rse.2019.01.010</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Blaschke</surname>
<given-names>T.</given-names>
</name>
</person-group> (<year>2010</year>). <article-title>Object-based image analysis for remote sensing</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>65</volume>, <fpage>2</fpage>&#x2013;<lpage>16</lpage>. <pub-id pub-id-type="doi">10.1016/j.isprsjprs.2009.06.004</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Breiman</surname>
<given-names>L.</given-names>
</name>
</person-group> (<year>2001</year>). <article-title>Random forests</article-title>. <source>Mach. Learn.</source> <volume>45</volume>, <fpage>5</fpage>&#x2013;<lpage>32</lpage>. <pub-id pub-id-type="doi">10.1023/A:1010933404324</pub-id>
</citation>
</ref>
<ref id="B95">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Buisson</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Archibald</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Fidelis</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Suding</surname>
<given-names>K. N.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Ancient grasslands guide ambitious goals in grassland restoration</article-title>. <source>Science</source> <volume>377</volume> (<issue>6606</issue>), <fpage>594</fpage>&#x2013;<lpage>598</lpage>. <pub-id pub-id-type="doi">10.1126/science.abo4605</pub-id>
</citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bunting</surname>
<given-names>P. J.</given-names>
</name>
<name>
<surname>Clewley</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Lucas</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Gillingham</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>The remote sensing and GIS software library (RSGISLib)</article-title>. <source>Comput. Geoscience</source> <volume>62</volume>, <fpage>216</fpage>&#x2013;<lpage>226</lpage>. <pub-id pub-id-type="doi">10.1016/j.cageo.2013.08.007</pub-id>
</citation>
</ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Camargo</surname>
<given-names>M. G. G.</given-names>
</name>
<name>
<surname>Carvalho</surname>
<given-names>G. A.</given-names>
</name>
<name>
<surname>Alberton</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>C Morellato</surname>
<given-names>L. P.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Leafing patterns and leaf exchange strategies of a cerrado woody community</article-title>. <source>Biotropica</source> <volume>50</volume> (<issue>3</issue>), <fpage>442</fpage>&#x2013;<lpage>454</lpage>. <pub-id pub-id-type="doi">10.1111/btp.12552</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Camargo</surname>
<given-names>M. G. G.</given-names>
</name>
<name>
<surname>Cazetta</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
<name>
<surname>Schaefer</surname>
<given-names>H. M.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Characterizing background heterogeneity in visual communication</article-title>. <source>Basic Appl. Ecol.</source> <volume>15</volume>, <fpage>326</fpage>&#x2013;<lpage>335</lpage>. <pub-id pub-id-type="doi">10.1016/j.baae.2014.06.002</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Christian</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Christiane</surname>
<given-names>S.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Impact of tree species on magnitude of PALSAR interferometric coherence over Siberian forest at frozen and unfrozen conditions</article-title>. <source>Remote Sens.</source> <volume>6</volume> (<issue>2</issue>), <fpage>1124</fpage>&#x2013;<lpage>1136</lpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3390/rs6021124">https://doi.org/10.3390/rs6021124</ext-link>.</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Colli-Silva</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Vasconcelos</surname>
<given-names>T. N. C.</given-names>
</name>
<name>
<surname>Pirani</surname>
<given-names>J. R.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Outstanding plant endemism levels strongly support the recognition of campo rupestre provinces in mountaintops of eastern South America</article-title>. <source>J. Biogeogr.</source> <volume>46</volume>, <fpage>1723</fpage>&#x2013;<lpage>1733</lpage>. <pub-id pub-id-type="doi">10.1111/jbi.13585</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Colomina</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Molina</surname>
<given-names>P.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Unmanned aerial systems for photogrammetry and remote sensing: A review</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>92</volume>, <fpage>79</fpage>&#x2013;<lpage>97</lpage>. <pub-id pub-id-type="doi">10.1016/j.isprsjprs.2014.02.013</pub-id>
</citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Congalton</surname>
<given-names>R. G.</given-names>
</name>
</person-group> (<year>1991</year>). <article-title>A review of assessing the accuracy of classifications of remotely sensed data</article-title>. <source>Remote Sens. Environ.</source> <volume>37</volume> (<issue>1</issue>), <fpage>35</fpage>&#x2013;<lpage>46</lpage>. <pub-id pub-id-type="doi">10.1016/0034-4257(91)90048-B</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Corbane</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Lang</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Pipkins</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Alleaume</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Deshayes</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Mill&#xe1;n</surname>
<given-names>V. E. G.</given-names>
</name>
<etal/>
</person-group> (<year>2015</year>). <article-title>Remote sensing for mapping natural habitats and their conservation status&#x2014;new opportunities and challenges</article-title>. <source>Int. J. Appl. Earth Observation Geoinformation</source> <volume>37</volume>, <fpage>7</fpage>&#x2013;<lpage>16</lpage>. <pub-id pub-id-type="doi">10.1016/j.jag.2014.11.005</pub-id>
</citation>
</ref>
<ref id="B20">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Cullum</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Rogers</surname>
<given-names>K. H.</given-names>
</name>
<name>
<surname>Brierley</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Witkowski</surname>
<given-names>E. T. F.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Ecological classification and mapping for landscape management and science: Foundations for the description of patterns and processes</article-title>. <source>Prog. Phys. Geogr. Earth Environ.</source> <volume>40</volume>, <fpage>38</fpage>&#x2013;<lpage>65</lpage>. <pub-id pub-id-type="doi">10.1177/0309133315611573</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>D&#xed;az-Delgado</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Cazacu</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Adamescu</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Rapid assessment of ecological integrity for LTER wetland sites by using UAV multispectral mapping</article-title>. <source>Drones</source> <volume>3</volume> (<issue>1</issue>), <fpage>3</fpage>. <pub-id pub-id-type="doi">10.3390/drones3010003</pub-id>
</citation>
</ref>
<ref id="B22">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Dujon</surname>
<given-names>A. M.</given-names>
</name>
<name>
<surname>Schofield</surname>
<given-names>G.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Importance of machine learning for enhancing ecological studies using information-rich imagery</article-title>. <source>Endanger. Species Res.</source> <volume>39</volume>, <fpage>91</fpage>&#x2013;<lpage>104</lpage>. <pub-id pub-id-type="doi">10.3354/esr00958</pub-id>
</citation>
</ref>
<ref id="B23">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Feng</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>He</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>A comprehensive review on recent applications of unmanned aerial vehicle remote sensing with various sensors for high-throughput plant phenotyping</article-title>. <source>Comput. Electron. Agric.</source> <volume>182</volume>, <fpage>106033</fpage>. <pub-id pub-id-type="doi">10.1016/j.compag.2021.106033</pub-id>
</citation>
</ref>
<ref id="B24">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fernandes</surname>
<given-names>G. W.</given-names>
</name>
<name>
<surname>Arantes-Garcia</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Barbosa</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>U Barbosa</surname>
<given-names>N. P.</given-names>
</name>
<name>
<surname>Batista</surname>
<given-names>E. K. L.</given-names>
</name>
<name>
<surname>Beiroz</surname>
<given-names>W.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Biodiversity and ecosystem services in the campo rupestre: A road map for the sustainability of the hottest Brazilian biodiversity hotspot</article-title>. <source>Perspect. Ecol. Conservation</source> <volume>4</volume>, <fpage>213</fpage>&#x2013;<lpage>222</lpage>. <pub-id pub-id-type="doi">10.1016/j.pecon.2020.10.004</pub-id>
</citation>
</ref>
<ref id="B25">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Fernandes</surname>
<given-names>G. W.</given-names>
</name>
<name>
<surname>Barbosa</surname>
<given-names>N. P. U.</given-names>
</name>
<name>
<surname>Alberton</surname>
<given-names>B. C.</given-names>
</name>
<name>
<surname>Barbieri</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Dirzo</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Goulart</surname>
<given-names>F.</given-names>
</name>
<etal/>
</person-group> (<year>2018</year>). <article-title>The deadly route to collapse and the uncertain fate of Brazilian rupestrian grasslands</article-title>. <source>Biodivers. Conservation</source> <volume>27</volume>, <fpage>2587</fpage>&#x2013;<lpage>2603</lpage>. <pub-id pub-id-type="doi">10.1007/s10531-018-1556-4</pub-id>
</citation>
</ref>
<ref id="B26">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Fernandes</surname>
<given-names>G. W.</given-names>
</name>
</person-group> (<year>2016</year>). <source>Ecology and conservation of mountaintop grasslands in Brazil</source>. <publisher-loc>Cham</publisher-loc>: <publisher-name>Springer International Publishing</publisher-name>.</citation>
</ref>
<ref id="B27">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ferreira</surname>
<given-names>L. G.</given-names>
</name>
<name>
<surname>Yoshioka</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Huete</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Sano</surname>
<given-names>F. F.</given-names>
</name>
</person-group> (<year>2003</year>). <article-title>Seasonal landscape and spectral vegetation index dynamics in the Brazilian cerrado: An analysis within the large-scale biosphere&#x2013;atmosphere experiment in amaz&#xf4;nia (LBA)</article-title>. <source>Remote Sens. Environ.</source> <volume>87</volume> (<issue>4</issue>), <fpage>534</fpage>&#x2013;<lpage>550</lpage>. <pub-id pub-id-type="doi">10.1016/j.rse.2002.09.003</pub-id>
</citation>
</ref>
<ref id="B28">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Giulietti</surname>
<given-names>A. M.</given-names>
</name>
<name>
<surname>Pirani</surname>
<given-names>J. R.</given-names>
</name>
<name>
<surname>Harley</surname>
<given-names>R. M.</given-names>
</name>
</person-group> (<year>1997</year>). <article-title>Espinha&#xe7;o range region. Eastern Brazil</article-title>. <source>Centers plant Divers. A guide strategies conservation</source> <volume>3</volume>, <fpage>397</fpage>&#x2013;<lpage>404</lpage>.</citation>
</ref>
<ref id="B29">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hamylton</surname>
<given-names>S. M.</given-names>
</name>
<name>
<surname>Morris</surname>
<given-names>R. H.</given-names>
</name>
<name>
<surname>Carvalho</surname>
<given-names>R. C.</given-names>
</name>
<name>
<surname>Roder</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Barlow</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Mills</surname>
<given-names>K.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Evaluating techniques for mapping island vegetation from unmanned aerial vehicle (UAV) images: Pixel classification, visual interpretation, and machine learning approaches</article-title>. <source>Int. J. Appl. Earth Observation Geoinformation</source> <volume>89</volume>, <fpage>102085</fpage>. <pub-id pub-id-type="doi">10.1016/j.jag.2020.102085</pub-id>
</citation>
</ref>
<ref id="B30">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Hassanalian</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Abdelkefi</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Classifications, applications, and design challenges of drones: A review</article-title>. <source>Prog. Aerosp. Sci.</source> <volume>91</volume>, <fpage>99</fpage>&#x2013;<lpage>131</lpage>. <pub-id pub-id-type="doi">10.1016/j.paerosci.2017.04.003</pub-id>
</citation>
</ref>
<ref id="B31">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Jensen</surname>
<given-names>J. R.</given-names>
</name>
</person-group> (<year>2015</year>). <source>Introductory digital image processing: A remote sensing perspective</source>. <publisher-loc>London, UK</publisher-loc>: <publisher-name>Pearson</publisher-name>.</citation>
</ref>
<ref id="B32">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Kampen</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Lederbauer</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Mund</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Immitzer</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2019</year>). <source>UAV-based multispectral data for tree species classification and tree vitality analysis</source>. <comment>Paper presented at the Dreil&#xe4;ndertagung der DGPF, der OVG und der SGPF in Wien, &#xd6;sterreich</comment>.</citation>
</ref>
<ref id="B33">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Klosterman</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Melaas</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Martinez</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Frederick</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>O&#x2019;Keefe</surname>
<given-names>J.</given-names>
</name>
<etal/>
</person-group> (<year>2018</year>). <article-title>Fine-scale perspectives on landscape phenology from unmanned aerial vehicle (UAV) photography</article-title>. <source>Agric. For. Meteorology</source> <volume>248</volume>, <fpage>397</fpage>&#x2013;<lpage>407</lpage>. <pub-id pub-id-type="doi">10.1016/j.agrformet.2017.10.015</pub-id>
</citation>
</ref>
<ref id="B34">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Klosterman</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Richardson</surname>
<given-names>A. D.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Observing spring and fall phenology in a deciduous forest with aerial drone imagery</article-title>. <source>Sensors</source> <volume>17</volume> (<issue>12</issue>), <fpage>2852</fpage>&#x2013;<lpage>52</lpage>. <pub-id pub-id-type="doi">10.3390/s17122852</pub-id>
</citation>
</ref>
<ref id="B35">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Laliberte</surname>
<given-names>A. S.</given-names>
</name>
<name>
<surname>Rango</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>Image processing and classification procedures for analysis of sub-decimeter imagery acquired with an unmanned aircraft over arid rangelands</article-title>. <source>GIScience Remote Sens.</source> <volume>48</volume> (<issue>1</issue>), <fpage>4</fpage>&#x2013;<lpage>23</lpage>. <pub-id pub-id-type="doi">10.2747/1548-1603.48.1.4</pub-id>
</citation>
</ref>
<ref id="B36">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Le Stradic</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Buisson</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Fernandes</surname>
<given-names>G. W.</given-names>
</name>
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Reproductive phenology of two co-occurring Neotropical mountain grasslands</article-title>. <source>J. Veg. Sci.</source> <volume>29</volume>, <fpage>15</fpage>&#x2013;<lpage>24</lpage>. <pub-id pub-id-type="doi">10.1111/jvs.12596</pub-id>
</citation>
</ref>
<ref id="B37">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Liu</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Abd-Elrahman</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Multi-view object-based classification of wetland land covers using unmanned aircraft system images</article-title>. <source>Remote Sens. Environ.</source> <volume>216</volume>, <fpage>122</fpage>&#x2013;<lpage>138</lpage>. <pub-id pub-id-type="doi">10.1016/j.rse.2018.06.043</pub-id>
</citation>
</ref>
<ref id="B38">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lu</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>He</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Species classification using Unmanned Aerial Vehicle (UAV)-acquired high spatial resolution imagery in a heterogeneous grassland</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>128</volume>, <fpage>73</fpage>&#x2013;<lpage>85</lpage>. <pub-id pub-id-type="doi">10.1016/j.isprsjprs.2017.03.011</pub-id>
</citation>
</ref>
<ref id="B39">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lu</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Weng</surname>
<given-names>Q.</given-names>
</name>
</person-group> (<year>2007</year>). <article-title>A survey of image classification methods and techniques for improving classification performance</article-title>. <source>Int. J. Remote Sens.</source> <volume>28</volume>, <fpage>823</fpage>&#x2013;<lpage>870</lpage>. <pub-id pub-id-type="doi">10.1080/01431160600746456</pub-id>
</citation>
</ref>
<ref id="B40">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lyu</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Dang</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Dou</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Lou</surname>
<given-names>A.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Unmanned aerial vehicle (UAV) remote sensing in grassland ecosystem monitoring: A systematic review</article-title>. <source>Remote Sens.</source> <volume>14</volume>, <fpage>1096</fpage>. <pub-id pub-id-type="doi">10.3390/rs14051096</pub-id>
</citation>
</ref>
<ref id="B41">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Lyu</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Dang</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Dou</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Xuan</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>S.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>A new method for grassland degradation monitoring by vegetation species composition using hyperspectral remote sensing</article-title>. <source>Ecol. Indic.</source> <volume>114</volume>, <fpage>106310</fpage>. <pub-id pub-id-type="doi">10.1016/j.ecolind.2020.106310</pub-id>
</citation>
</ref>
<ref id="B42">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ma</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Ma</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Cheng</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Du</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Liu</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>A review of supervised object-based land-cover image classification</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>130</volume>, <fpage>277</fpage>&#x2013;<lpage>293</lpage>. <pub-id pub-id-type="doi">10.1016/j.isprsjprs.2017.06.001</pub-id>
</citation>
</ref>
<ref id="B43">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mahesh</surname>
<given-names>B.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Machine learning algorithms &#x2013; a review</article-title>. <source>Int. J. Sci. Res. (IJSR)</source> <volume>9</volume> (<issue>1</issue>). <pub-id pub-id-type="doi">10.21275/ART20203995</pub-id>
</citation>
</ref>
<ref id="B44">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mattos</surname>
<given-names>J. S.</given-names>
</name>
<name>
<surname>Camargo</surname>
<given-names>M. G. G.</given-names>
</name>
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
<name>
<surname>Batalha</surname>
<given-names>M. A.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Plant phylogenetic diversity of tropical mountaintop rocky grasslands: Local and regional constraints</article-title>. <source>Plant Ecol.</source> <volume>220</volume> (<issue>12</issue>), <fpage>1119</fpage>&#x2013;<lpage>1129</lpage>. <pub-id pub-id-type="doi">10.1007/s11258-019-00982-5</pub-id>
</citation>
</ref>
<ref id="B45">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Maxwell</surname>
<given-names>A. E.</given-names>
</name>
<name>
<surname>Warner</surname>
<given-names>T. A.</given-names>
</name>
<name>
<surname>Fang</surname>
<given-names>F.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Implementation of machine-learning classification in remote sensing: An applied review</article-title>. <source>Int. J. Remote Sens.</source> <volume>39</volume>, <fpage>2784</fpage>&#x2013;<lpage>2817</lpage>. <pub-id pub-id-type="doi">10.1080/01431161.2018.1433343</pub-id>
</citation>
</ref>
<ref id="B46">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Meng</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Yu</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Qin</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Sun</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>J.</given-names>
</name>
<etal/>
</person-group> (<year>2021</year>). <article-title>Mapping of <italic>Kobresia pygmaea</italic> community based on unmanned aerial vehicle technology and gaofen remote sensing data in alpine meadow grassland: A case study in eastern of qinghai&#x2013;Tibetan plateau</article-title>. <source>Remote Sens.</source> <volume>13</volume> (<issue>13</issue>), <fpage>2483</fpage>. <pub-id pub-id-type="doi">10.3390/rs13132483</pub-id>
</citation>
</ref>
<ref id="B47">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Meng</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Lv</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>M.</given-names>
</name>
<etal/>
</person-group> (<year>2022</year>). <article-title>Mapping grassland classes using unmanned aerial vehicle and MODIS NDVI data for temperate grassland in inner Mongolia, China</article-title>. <source>Remote Sens.</source> <volume>14</volume> (<issue>9</issue>), <fpage>2094</fpage>. <pub-id pub-id-type="doi">10.3390/rs14092094</pub-id>
</citation>
</ref>
<ref id="B96">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
<name>
<surname>Alberton</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Alvarado</surname>
<given-names>S. T.</given-names>
</name>
<name>
<surname>Borges</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Buisson</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Camargo</surname>
<given-names>M. G. G.</given-names>
</name>
<etal/>
</person-group> (<year>2016</year>). <article-title>Linking plant phenology to conservation biology</article-title>. <source>Biol. Conserv.</source> <volume>195</volume>, <fpage>60</fpage>&#x2013;<lpage>72</lpage>. <pub-id pub-id-type="doi">10.1016/j.biocon.2015.12.033</pub-id>
</citation>
</ref>
<ref id="B48">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
<name>
<surname>Silveira</surname>
<given-names>F. A. O.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>Plant life in campo rupestre: New lessons from an ancient biodiversity hotspot</article-title>. <source>Flora</source> <volume>238</volume>, <fpage>1</fpage>&#x2013;<lpage>10</lpage>. <pub-id pub-id-type="doi">10.1016/j.flora.2017.12.001</pub-id>
</citation>
</ref>
<ref id="B49">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Navin</surname>
<given-names>M. S.</given-names>
</name>
<name>
<surname>Agilandeeswari</surname>
<given-names>L.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>Comprehensive review on land use/land cover change classification in remote sensing</article-title>. <source>J. Spectr. Imaging</source> <volume>9</volume>. <pub-id pub-id-type="doi">10.1255/jsi.2020.a8</pub-id>
</citation>
</ref>
<ref id="B50">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Neumann</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Behling</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Schindhelm</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Itzerott</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Weiss</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Wichmann</surname>
<given-names>M.</given-names>
</name>
<etal/>
</person-group> (<year>2019</year>). <article-title>The colors of heath flowering &#x2013; quantifying spatial patterns of phenology in Calluna life-cycle phases using high-resolution drone imagery</article-title>. <source>Remote Sens. Ecol. Conservation</source> <volume>6</volume> (<issue>1</issue>), <fpage>35</fpage>&#x2013;<lpage>51</lpage>. <pub-id pub-id-type="doi">10.1002/rse2.121</pub-id>
</citation>
</ref>
<ref id="B51">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Nex</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Remondino</surname>
<given-names>F.</given-names>
</name>
</person-group> (<year>2013</year>). <article-title>UAV for 3D mapping applications: A review</article-title>. <source>Appl. Geomatics</source> <volume>6</volume> (<issue>1</issue>), <fpage>1</fpage>&#x2013;<lpage>15</lpage>. <pub-id pub-id-type="doi">10.1007/s12518-013-0120-x</pub-id>
</citation>
</ref>
<ref id="B52">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Nguyen</surname>
<given-names>U.</given-names>
</name>
<name>
<surname>Glenn</surname>
<given-names>E. P.</given-names>
</name>
<name>
<surname>Dang</surname>
<given-names>T. D.</given-names>
</name>
<name>
<surname>Pham</surname>
<given-names>L. T. H.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Mapping vegetation types in semi-arid riparian regions using random forest and object-based image approach: A case study of the Colorado river ecosystem, grand canyon, Arizona</article-title>. <source>Ecol. Inf.</source> <volume>50</volume>, <fpage>43</fpage>&#x2013;<lpage>50</lpage>. <pub-id pub-id-type="doi">10.1016/j.ecoinf.2018.12.006</pub-id>
</citation>
</ref>
<ref id="B53">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Nogueira</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Santos</surname>
<given-names>J. A.</given-names>
</name>
<name>
<surname>Menini</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Silva</surname>
<given-names>T. S. F.</given-names>
</name>
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
<name>
<surname>Torres</surname>
<given-names>R. S.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Spatio-Temporal vegetation pixel classification by using Convolutional Networks</article-title>. <source>IEEE Geoscience Remote Sens. Lett.</source> <volume>16</volume> (<issue>10</issue>), <fpage>1665</fpage>&#x2013;<lpage>1669</lpage>. <pub-id pub-id-type="doi">10.1109/LGRS.2019.2903194</pub-id>
</citation>
</ref>
<ref id="B54">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Oddi</surname>
<given-names>F. J.</given-names>
</name>
<name>
<surname>Miguez</surname>
<given-names>F. E.</given-names>
</name>
<name>
<surname>Ghermandi</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Bianchi</surname>
<given-names>L. O.</given-names>
</name>
<name>
<surname>Garibaldi</surname>
<given-names>L. A.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>A nonlinear mixed-effects modeling approach for ecological data: Using temporal dynamics of vegetation moisture as an example</article-title>. <source>Ecol. Evol.</source> <volume>9</volume>, <fpage>10225</fpage>&#x2013;<lpage>10240</lpage>. <pub-id pub-id-type="doi">10.1002/ece3.5543</pub-id>
</citation>
</ref>
<ref id="B55">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Park</surname>
<given-names>J. K.</given-names>
</name>
<name>
<surname>Muller-Landau</surname>
<given-names>H. C.</given-names>
</name>
<name>
<surname>Lichstein</surname>
<given-names>J. W.</given-names>
</name>
<name>
<surname>Rifai</surname>
<given-names>S. W.</given-names>
</name>
<name>
<surname>Dandois</surname>
<given-names>J. P.</given-names>
</name>
<name>
<surname>Bohlman</surname>
<given-names>S. A.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Quantifying leaf phenology of individual trees and species in a tropical forest using Unmanned Aerial Vehicle (UAV) images</article-title>. <source>Remote Sens.</source> <volume>11</volume> (<issue>13</issue>), <fpage>1534</fpage>&#x2013;<lpage>34</lpage>. <pub-id pub-id-type="doi">10.3390/rs11131534</pub-id>
</citation>
</ref>
<ref id="B56">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Peci&#xf1;a</surname>
<given-names>M. V.</given-names>
</name>
<name>
<surname>Ward</surname>
<given-names>R. D.</given-names>
</name>
<name>
<surname>Bunce</surname>
<given-names>R. G. H.</given-names>
</name>
<name>
<surname>Sepp</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Kuusemets</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Luuk</surname>
<given-names>O.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Country-scale mapping of ecosystem services provided by semi-natural grasslands</article-title>. <source>Sci. Total Environ.</source> <volume>661</volume>, <fpage>212</fpage>&#x2013;<lpage>225</lpage>. <pub-id pub-id-type="doi">10.1016/j.scitotenv.2019.01.174</pub-id>
</citation>
</ref>
<ref id="B57">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pedregosa</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Varoguaux</surname>
<given-names>G.</given-names>
</name>
<name>
<surname>Gramfort</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Michel</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Thirion</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Grisel</surname>
<given-names>O.</given-names>
</name>
<etal/>
</person-group> (<year>2011</year>). <article-title>Scikit-learn: Machine learning in Python</article-title>. <source>J. Mach. Learn. Res.</source> <volume>12</volume>, <fpage>2825</fpage>&#x2013;<lpage>2830</lpage>.</citation>
</ref>
<ref id="B58">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Pichon</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Leroux</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Macombe</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Taylor</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Tisseyre</surname>
<given-names>B.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>What relevant information can be identified by experts on unmanned aerial vehicles&#x2019; visible images for precision viticulture?</article-title> <source>Precis. Agric.</source> <volume>20</volume>, <fpage>278</fpage>&#x2013;<lpage>294</lpage>. <pub-id pub-id-type="doi">10.1007/s11119-019-09634-0</pub-id>
</citation>
</ref>
<ref id="B59">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Prentice</surname>
<given-names>R. M.</given-names>
</name>
<name>
<surname>Peci&#xf1;a</surname>
<given-names>M. V.</given-names>
</name>
<name>
<surname>Ward</surname>
<given-names>R. D.</given-names>
</name>
<name>
<surname>Bergamo</surname>
<given-names>T. F.</given-names>
</name>
<name>
<surname>Joyce</surname>
<given-names>C. B.</given-names>
</name>
<name>
<surname>Sepp</surname>
<given-names>K.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Machine learning classification and accuracy assessment from high-resolution images of coastal wetlands</article-title>. <source>Remote Sens.</source> <volume>13</volume> (<issue>18</issue>), <fpage>3669</fpage>. <pub-id pub-id-type="doi">10.3390/rs13183669</pub-id>
</citation>
</ref>
<ref id="B60">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rapini</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Ribeiro</surname>
<given-names>P. L.</given-names>
</name>
<name>
<surname>Lambert</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Pirani</surname>
<given-names>J. R.</given-names>
</name>
</person-group> (<year>2008</year>). <article-title>A flora dos campos rupestres da Cadeia do Espinha&#xe7;o</article-title>. <source>Megadiversidade</source> <volume>4</volume>, <fpage>16</fpage>&#x2013;<lpage>24</lpage>.</citation>
</ref>
<ref id="B61">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Reis</surname>
<given-names>B. P.</given-names>
</name>
<name>
<surname>Martins</surname>
<given-names>S. V.</given-names>
</name>
<name>
<surname>Fernandes Filho</surname>
<given-names>E. I.</given-names>
</name>
<name>
<surname>Sarcinelli</surname>
<given-names>T. S.</given-names>
</name>
<name>
<surname>Gleriani</surname>
<given-names>J. M.</given-names>
</name>
<name>
<surname>Leite</surname>
<given-names>H. G.</given-names>
</name>
<etal/>
</person-group> (<year>2019</year>). <article-title>Forest restoration monitoring through digital processing of high resolution images</article-title>. <source>Ecol. Eng.</source> <volume>127</volume>, <fpage>178</fpage>&#x2013;<lpage>186</lpage>. <pub-id pub-id-type="doi">10.1016/j.ecoleng.2018.11.022</pub-id>
</citation>
</ref>
<ref id="B62">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Rodriguez-Galiano</surname>
<given-names>V. F.</given-names>
</name>
<name>
<surname>Guimire</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Rogan</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Chica-Olmo</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Rigol-Sanchez</surname>
<given-names>J. P.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>An assessment of the effectiveness of a random forest classifier for land-cover classification</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>67</volume>, <fpage>93</fpage>&#x2013;<lpage>104</lpage>. <pub-id pub-id-type="doi">10.1016/j.isprsjprs.2011.11.002</pub-id>
</citation>
</ref>
<ref id="B63">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Ruwaimana</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Satyanarayana</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Otero</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Muslim</surname>
<given-names>A. M.</given-names>
</name>
<name>
<surname>Muhammad Syafiq</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Ibrahim</surname>
<given-names>S.</given-names>
</name>
<etal/>
</person-group> (<year>2018</year>). <article-title>The advantages of using drones over space-borne imagery in the mapping of mangrove forests</article-title>. <source>PLoS ONE</source> <volume>13</volume>, <fpage>e0200288</fpage>. <pub-id pub-id-type="doi">10.1371/journal.pone.0200288</pub-id>
</citation>
</ref>
<ref id="B64">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sarker</surname>
<given-names>I. H.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Machine learning: Algorithms, real-world applications and research directions</article-title>. <source>SN Comput. Sci.</source> <volume>2</volume>, <fpage>160</fpage>. <pub-id pub-id-type="doi">10.1007/s42979-021-00592-x</pub-id>
</citation>
</ref>
<ref id="B65">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sch&#xe4;fer</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Heiskanen</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Heikinheimo</surname>
<given-names>V.</given-names>
</name>
<name>
<surname>Pellikka</surname>
<given-names>P.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Mapping tree species diversity of a tropical montane forest by unsupervised clustering of airborne imaging spectroscopy data</article-title>. <source>Ecol. Indic.</source> <volume>64</volume>, <fpage>49</fpage>&#x2013;<lpage>58</lpage>. <pub-id pub-id-type="doi">10.1016/j.ecolind.2015.12.026</pub-id>
</citation>
</ref>
<ref id="B66">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Schmidt</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Fassnacht</surname>
<given-names>F. E.</given-names>
</name>
<name>
<surname>Neff</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Lausch</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Kleinschmit</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>F&#xf6;rster</surname>
<given-names>M.</given-names>
</name>
<etal/>
</person-group> (<year>2017</year>). <article-title>Adapting a Natura 2000 field guideline for a remote sensing-based assessment of heathland conservation status</article-title>. <source>Int. J. Appl. Earth Observation Geoinformation</source> <volume>60</volume>, <fpage>61</fpage>&#x2013;<lpage>71</lpage>. <pub-id pub-id-type="doi">10.1016/j.jag.2017.04.005</pub-id>
</citation>
</ref>
<ref id="B67">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Silveira</surname>
<given-names>F. A. O.</given-names>
</name>
<name>
<surname>Negreiros</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Barbosa</surname>
<given-names>N. P. U.</given-names>
</name>
<name>
<surname>Buisson</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Carmo</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Carstensen</surname>
<given-names>D.</given-names>
</name>
<etal/>
</person-group> (<year>2016</year>). <article-title>Ecology and evolution of plant diversity in the endangered campo rupestre: A neglected conservation priority</article-title>. <source>Plant Soil</source> <volume>403</volume>, <fpage>129</fpage>&#x2013;<lpage>152</lpage>. <pub-id pub-id-type="doi">10.1007/s11104-015-2637-8</pub-id>
</citation>
</ref>
<ref id="B68">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Singh</surname>
<given-names>K. K.</given-names>
</name>
<name>
<surname>Frazier</surname>
<given-names>A. E.</given-names>
</name>
</person-group> (<year>2018</year>). <article-title>A meta-analysis and review of unmanned aircraft system (UAS) imagery for terrestrial applications</article-title>. <source>Int. J. Remote Sens.</source> <volume>39</volume>, <fpage>5078</fpage>&#x2013;<lpage>5098</lpage>. <pub-id pub-id-type="doi">10.1080/01431161.2017.1420941</pub-id>
</citation>
</ref>
<ref id="B69">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Streher</surname>
<given-names>A. B.</given-names>
</name>
<name>
<surname>Sobreiro</surname>
<given-names>J. F.</given-names>
</name>
<name>
<surname>Morellato</surname>
<given-names>L. P. C.</given-names>
</name>
<name>
<surname>Silva</surname>
<given-names>T. S. F.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Land surface phenology in the tropics: The role of climate and topography in a snow-free mountain</article-title>. <source>Ecosystems</source> <volume>20</volume>, <fpage>1436</fpage>&#x2013;<lpage>1453</lpage>. <pub-id pub-id-type="doi">10.1007/s10021-017-0123-2</pub-id>
</citation>
</ref>
<ref id="B70">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Tang</surname>
<given-names>G. L.</given-names>
</name>
<name>
<surname>Yuan</surname>
<given-names>S. X.</given-names>
</name>
<name>
<surname>Lin</surname>
<given-names>Z. W.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Small unmanned aerial vehicles for low-altitude remote sensing and its application progress in ecology</article-title>. <source>J. Appl. Ecol.</source> <volume>28</volume>, <fpage>528</fpage>&#x2013;<lpage>536</lpage>. <pub-id pub-id-type="doi">10.13287/j.1001-9332.201702.030</pub-id>
</citation>
</ref>
<ref id="B71">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Sun</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Yang</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Xie</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Huang</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>UAVs as remote sensing platforms in plant ecology: Review of applications and challenges</article-title>. <source>J. Plant Ecol.</source> <volume>14</volume> (<issue>6</issue>), <fpage>1003</fpage>&#x2013;<lpage>1023</lpage>. <pub-id pub-id-type="doi">10.1093/jpe/rtab089</pub-id>
</citation>
</ref>
<ref id="B72">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Thessen</surname>
<given-names>A. E.</given-names>
</name>
</person-group> (<year>2016</year>). <article-title>Adoption of machine learning techniques in ecology and Earth science</article-title>. <source>One Ecosyst.</source> <volume>1</volume>, <fpage>e8621</fpage>. <pub-id pub-id-type="doi">10.3897/oneeco.1.e8621</pub-id>
</citation>
</ref>
<ref id="B73">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Tsai</surname>
<given-names>C. H.</given-names>
</name>
<name>
<surname>Lin</surname>
<given-names>Y. C.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>An accelerated image matching technique for UAV orthoimage registration</article-title>. <source>ISPRS J. Photogrammetry Remote Sens.</source> <volume>128</volume>, <fpage>130</fpage>&#x2013;<lpage>145</lpage>. <pub-id pub-id-type="doi">10.1016/j.isprsjprs.2017.03.017</pub-id>
</citation>
</ref>
<ref id="B74">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Valbuena</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>O&#x2019;Connor</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Zellweger</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Simonson</surname>
<given-names>W.</given-names>
</name>
<name>
<surname>Vihervaara</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Maltamo</surname>
<given-names>M.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Standardizing ecosystem morphological traits from 3D information sources</article-title>. <source>Trends Ecol. Evol.</source> <volume>35</volume>, <fpage>656</fpage>&#x2013;<lpage>667</lpage>. <pub-id pub-id-type="doi">10.1016/j.tree.2020.03.006</pub-id>
</citation>
</ref>
<ref id="B75">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Vasconcelos</surname>
<given-names>T. N. C.</given-names>
</name>
<name>
<surname>Alcantara</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Andrino</surname>
<given-names>C. O.</given-names>
</name>
<name>
<surname>Forest</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Reginato</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Simon</surname>
<given-names>M. F.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Fast diversification through a mosaic of evolutionary histories characterizes the endemic flora of ancient Neotropical mountains</article-title>. <source>Proc. R. Soc. Biol. Sci.</source> <volume>287</volume>, <fpage>20192933</fpage>. <pub-id pub-id-type="doi">10.1098/rspb.2019.2933</pub-id>
</citation>
</ref>
<ref id="B76">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Villoslada</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Bergamo</surname>
<given-names>T. F.</given-names>
</name>
<name>
<surname>Ward</surname>
<given-names>R. D.</given-names>
</name>
<name>
<surname>Burnside</surname>
<given-names>N. G.</given-names>
</name>
<name>
<surname>Joyce</surname>
<given-names>C. B.</given-names>
</name>
<name>
<surname>Bunce</surname>
<given-names>R. G. H.</given-names>
</name>
<etal/>
</person-group> (<year>2020</year>). <article-title>Fine-scale plant community assessment in coastal meadows using UAV-based multispectral data</article-title>. <source>Ecol. Indic.</source> <volume>111</volume>, <fpage>105979</fpage>. <pub-id pub-id-type="doi">10.1016/j.ecolind.2019.105979</pub-id>
</citation>
</ref>
<ref id="B77">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Walker</surname>
<given-names>B. H.</given-names>
</name>
<name>
<surname>Noy-Meir</surname>
<given-names>I.</given-names>
</name>
</person-group> (<year>1982</year>). &#x201c;<article-title>Aspects of the stability and resilience of savanna ecosystems</article-title>,&#x201d; in <source>Ecology of tropical savannas: Ecological studies (analysis and synthesis)</source>. Editors <person-group person-group-type="editor">
<name>
<surname>Huntley</surname>
<given-names>B. J.</given-names>
</name>
<name>
<surname>Walker</surname>
<given-names>B. H.</given-names>
</name>
</person-group> (<publisher-loc>Berlin, Heidelberg</publisher-loc>: <publisher-name>Springer</publisher-name>).</citation>
</ref>
<ref id="B78">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Han</surname>
<given-names>D.</given-names>
</name>
<name>
<surname>Mu</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Jiang</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Yao</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Bai</surname>
<given-names>Y.</given-names>
</name>
<etal/>
</person-group> (<year>2019a</year>). <article-title>Landscape-level vegetation classification and fractional woody and herbaceous vegetation cover estimation over the dryland ecosystems by unmanned aerial vehicle platform</article-title>. <source>Agric. For. Meteorology</source> <volume>278</volume>, <fpage>107665</fpage>. <pub-id pub-id-type="doi">10.1016/j.agrformet.2019.107665</pub-id>
</citation>
</ref>
<ref id="B79">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Wang</surname>
<given-names>Z.</given-names>
</name>
<name>
<surname>Townsend</surname>
<given-names>P. A.</given-names>
</name>
<name>
<surname>Schweiger</surname>
<given-names>A. K.</given-names>
</name>
<name>
<surname>Couture</surname>
<given-names>J. J.</given-names>
</name>
<name>
<surname>Singh</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Hobbie</surname>
<given-names>S. E.</given-names>
</name>
<etal/>
</person-group> (<year>2019b</year>). <article-title>Mapping foliar functional traits and their uncertainties across three years in a grassland experiment</article-title>. <source>Remote Sens. Environ.</source> <volume>221</volume>, <fpage>405</fpage>&#x2013;<lpage>416</lpage>. <pub-id pub-id-type="doi">10.1016/j.rse.2018.11.016</pub-id>
</citation>
</ref>
<ref id="B80">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Watts</surname>
<given-names>A. C.</given-names>
</name>
<name>
<surname>Ambrosia</surname>
<given-names>V. G.</given-names>
</name>
<name>
<surname>Hinkley</surname>
<given-names>E. A.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Unmanned aircraft systems in remote sensing and scientific research: Classification and considerations of use</article-title>. <source>Remote Sens.</source> <volume>4</volume>, <fpage>1671</fpage>&#x2013;<lpage>1692</lpage>. <pub-id pub-id-type="doi">10.3390/rs4061671</pub-id>
</citation>
</ref>
<ref id="B81">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Westoby</surname>
<given-names>M. J.</given-names>
</name>
<name>
<surname>Brasington</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Glasser</surname>
<given-names>N. F.</given-names>
</name>
<name>
<surname>Hambrey</surname>
<given-names>M. J.</given-names>
</name>
<name>
<surname>Reynolds</surname>
<given-names>J. M.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Structure from motion photogrammetry: A low-cost, effective tool for geoscience applications</article-title>. <source>Geomorphology</source> <volume>179</volume>, <fpage>300</fpage>&#x2013;<lpage>314</lpage>. <pub-id pub-id-type="doi">10.1016/j.geomorph.2012.08.021</pub-id>
</citation>
</ref>
<ref id="B82">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Whitehead</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Hugenholtz</surname>
<given-names>C. H.</given-names>
</name>
</person-group> (<year>2014</year>). <article-title>Remote sensing of the environment with small unmanned aircraft systems (UASs), part 1: A review of progress and challenges</article-title>. <source>J. Unmanned Veh. Syst.</source> <volume>2</volume>, <fpage>69</fpage>&#x2013;<lpage>85</lpage>. <pub-id pub-id-type="doi">10.1139/juvs-2014-0006</pub-id>
</citation>
</ref>
<ref id="B83">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Xie</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Welsh</surname>
<given-names>W.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Mapping wetlands and <italic>Phragmites</italic> using publically available remotely sensed images</article-title>. <source>Photogrammetric Eng. Remote Sens.</source> <volume>81</volume>, <fpage>69</fpage>&#x2013;<lpage>78</lpage>. <pub-id pub-id-type="doi">10.14358/PERS.81.1.69</pub-id>
</citation>
</ref>
<ref id="B84">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yao</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>Qin</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Chen</surname>
<given-names>X. X.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>Unmanned aerial vehicle for remote sensing applications&#x2014;A review</article-title>. <source>Remote Sens.</source> <volume>11</volume> (<issue>12</issue>), <fpage>1443</fpage>. <pub-id pub-id-type="doi">10.3390/rs11121443</pub-id>
</citation>
</ref>
<ref id="B85">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zhang</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>L.</given-names>
</name>
<name>
<surname>Zhang</surname>
<given-names>B. Y. J.</given-names>
</name>
<name>
<surname>Sun</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Dong</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Wang</surname>
<given-names>X.</given-names>
</name>
<etal/>
</person-group> (<year>2021</year>). <article-title>Land cover classification in a mixed forest-grassland ecosystem using LResU-net and UAV imagery</article-title>. <source>J. For. Res.</source> <volume>33</volume>, <fpage>923</fpage>&#x2013;<lpage>936</lpage>. <pub-id pub-id-type="doi">10.1007/s11676-021-01375-z</pub-id>
</citation>
</ref>
<ref id="B86">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Zou</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Greenberg</surname>
<given-names>J. A.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>A spatialized classification approach for land cover mapping using hyperspatial imagery</article-title>. <source>Remote Sens. Environ.</source> <volume>232</volume>, <fpage>111248</fpage>. <pub-id pub-id-type="doi">10.1016/j.rse.2019.111248</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>