<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article article-type="research-article" dtd-version="2.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Energy Res.</journal-id>
<journal-title>Frontiers in Energy Research</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Energy Res.</abbrev-journal-title>
<issn pub-type="epub">2296-598X</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="publisher-id">1499751</article-id>
<article-id pub-id-type="doi">10.3389/fenrg.2025.1499751</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Energy Research</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Bayesian optimisation algorithm based optimised deep bidirectional long short term memory for global horizontal irradiance prediction in long-term horizon</article-title>
<alt-title alt-title-type="left-running-head">Madhiarasan</alt-title>
<alt-title alt-title-type="right-running-head">
<ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3389/fenrg.2025.1499751">10.3389/fenrg.2025.1499751</ext-link>
</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Madhiarasan</surname>
<given-names>Manoharan</given-names>
</name>
<xref ref-type="corresp" rid="c001">&#x2a;</xref>
<uri xlink:href="https://loop.frontiersin.org/people/1535944/overview"/>
<xref ref-type="fn" rid="fn1">
<sup>&#x2020;</sup>
</xref>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
</contrib>
</contrib-group>
<aff>
<institution>Department of Business Development and Technology</institution>, <institution>Aarhus School of Business and Social Sciences (BSS)</institution>, <institution>Aarhus University</institution>, <addr-line>Herning</addr-line>, <country>Denmark</country>
</aff>
<author-notes>
<fn fn-type="edited-by">
<p>
<bold>Edited by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1882335/overview">Praveen Kumar Balachandran</ext-link>, Universiti Kebangsaan Malaysia, Malaysia</p>
</fn>
<fn fn-type="edited-by">
<p>
<bold>Reviewed by:</bold> <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/1589398/overview">Mohit Bajaj</ext-link>, Graphic Era University, India</p>
<p>
<ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/2859742/overview">Vankadara Sampath Kumar</ext-link>, Malla Reddy Engineering College, India</p>
</fn>
<corresp id="c001">&#x2a;Correspondence: Manoharan Madhiarasan, <email>mmadhiarasan89@gmail.com</email>, <email>mmadhiarasan@btech.au.dk</email>
</corresp>
<fn fn-type="other" id="fn1">
<label>
<sup>&#x2020;</sup>
</label>
<p>
<bold>ORCID:</bold> Manoharan Madhiarasan, <ext-link ext-link-type="uri" xlink:href="https://orcid.org/0000-0003-2552-0400">https://orcid.org/0000-0003-2552-0400</ext-link>
</p>
</fn>
</author-notes>
<pub-date pub-type="epub">
<day>19</day>
<month>02</month>
<year>2025</year>
</pub-date>
<pub-date pub-type="collection">
<year>2025</year>
</pub-date>
<volume>13</volume>
<elocation-id>1499751</elocation-id>
<history>
<date date-type="received">
<day>21</day>
<month>09</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>20</day>
<month>01</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#xa9; 2025 Madhiarasan.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Madhiarasan</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>With the continued development and progress of industrialisation, modernisation, and smart cities, global energy demand continues to increase. Photovoltaic systems are used to control CO<sub>2</sub> emissions and manage global energy demand. Photovoltaic (PV) system public utility, effective planning, control, and operation compels accurate Global Horizontal Irradiance (GHI) prediction. This paper is ardent about designing a novel hybrid GHI prediction method: Bayesian Optimisation algorithm-based Optimized Deep Bidirectional Long Short Term Memory (BOA-D-BiLSTM). This work attempts to fine-tune the Deep Bidirectional Long Short Term Memory hyperparameters employing Bayesian optimisation. Globally ranked fifth in solar photovoltaic deployment, the INDIA Two Region Solar Irradiance Dataset from the NOAA-National Oceanic and Atmospheric Administration was used to assess the proposed BOA-D-BiLSTM approach for the long-term prediction horizon. The superior prediction performance of the proposed BOA-D-BiLSTM is highlighted with the help of experimental results and comparative analysis with grid search and random search. Furthermore, the forecasting effectiveness is compared with other models, namely, the Persistence Model, ARIMA, BPN, RNN, SVR, Boosted Tree, LSTM, and BiLSTM. Compared to other forecasting models according to the resulting evaluation error metrics, the suggested BOA-D-BiLSTM model has minor evaluation error metrics, namely, Root Mean Squared Error: 0.0026 and 0.0030, Mean Absolute Error:0.0016 and 0.0018, Mean-Squared Error: 6.6852 &#xd7; 10<sup>&#x2212;06</sup> and 8.8628 &#xd7; 10<sup>&#x2212;06</sup> and R-squared: 0.9994 and 0.9988 on both dataset 1 and 2 respectively. The proposed BOA-D-BiLSTM model outperforms other baseline models. Thus, the proposed BOA-D-BiLSTM is a viable and novel potential forecasting model for effective distributed generation planning and control.</p>
</abstract>
<kwd-group>
<kwd>deep learning</kwd>
<kwd>bidirectional long short term memory</kwd>
<kwd>bayesian optimisation algorithm</kwd>
<kwd>hyperparameters</kwd>
<kwd>prediction</kwd>
<kwd>long-term horizon</kwd>
<kwd>global horizontal irradiance</kwd>
</kwd-group>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Solar Energy</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec id="s1">
<title>1 Introduction</title>
<p>Within the renewable energy canopy, solar energy has emerged as a potentially viable energy source to mitigate the increase in greenhouse gas emissions from fossil fuel combustion and address the ongoing energy issue (<xref ref-type="bibr" rid="B11">Madhiarasan, 2018</xref>). According to the Ministry of New and Renewable Energy (MNRE) report on 30 November 2022, India is ranked fifth globally in solar energy deployment, stating that around 70.10&#xa0;GW (Gigawatt) of installed solar power is now available in India (<xref ref-type="bibr" rid="B15">Ministry of New and Renewable Energy, 2024</xref>). Global Horizontal Irradiance (GHI) prediction is necessary for solar energy systems to be deployed effectively because it affects grid stability, system design, and energy planning. Long-term GHI predictions are necessary for strategic planning, guaranteeing appropriate decisions on infrastructure expenditures, resource allocation, and policy creation. As the world moves toward energy from solar energy sources, the need for accurate long-term forecasting has increased since it helps energy suppliers anticipate fluctuation, maximise solar integration and reduce dependency on fossil fuels. Precise solar GHI forecasting is vital for the safe and effective integration of solar power into the grid system. The photovoltaic energy is unstable because of the inherent intermittency of solar irradiance and other climatic conditions. As a result, effective solar irradiance forecasting is regarded as one of the most essential issues in the power grid. Effective planning, balancing, and controlling requires forecasting the GHI in PV systems penetrating energy consumption and production. Long Short Term Memory (LSTM) proved effective in time series and sequence modelling applications. The Bidirectional LSTM is a variant of the LSTM to benefit from better results for real-time applications. 
Over the past 2&#xa0;decades, time series, sequence learning, and other real-time applications have widely used BiLSTM (Bidirectional Long Short Term Memory). Fixing optimal hyperparameters requires experience and skill. Manual exploration of optimal hyperparameters is a relatively complex and time-consuming task. Optimisation of hyperparameter setup looks complicated since it varies based on the task to be performed, the dataset to be utilised, and so on, making each circumstance unique. Hyperparameters fine-tuning using trial-and-error, grid search, and random search are well-known methods but have less efficient, computationally costly, and uncertain issues, respectively. In a BiLSTM, significant performance changes occur because of the various hyperparameters. Optimal selection of the hyperparameter leads to outstanding and competing performances.</p>
<p>Estimating the optimal values of the hyperparameters is perplexing and requires much computational effort and cost. Optimisation algorithm-based hyperparameter fine-tuning is vital for better performance. Bayesian optimisation is effective and sensible for identifying the optimal hyperparameters of the BiLSTM. Numerous prediction models have been designed to predict the sporadic and non-linear solar irradiance time series (Global Horizontal Irradiance). However, most of these models bypass the application of Bayesian optimisation to maximise the hyperparameters throughout the training process for neural networks. The present work aims to bridge the gap using a new hybrid prediction approach (BOA-D-BiLSTM). Therefore, this paper strives to use Bayesian optimisation-based BiLSTM hyperparameter optimisation for the global solar irradiance (GHI) prediction task.</p>
<p>The following are the significant contributions of the paper:<list list-type="simple">
<list-item>
<p>&#x2022; This research contribution provides a robust and reliable prediction model to bridge the gap between existing models and the growing demand for generic and precise prediction.</p>
</list-item>
<list-item>
<p>&#x2022; The Bayesian optimisation algorithm is used to optimise the hyperparameters of Deep BiLSTM.</p>
</list-item>
<list-item>
<p>&#x2022; Hyperparameter optimisation based on the Bayesian optimisation algorithm is more effective and precise than grid and random search.</p>
</list-item>
<list-item>
<p>&#x2022; Using two locations of real-time data from India, the analysis highlights the outstanding performance of the proposed BOA-D-BiLSTM model for long-term GHI forecasting.</p>
</list-item>
<list-item>
<p>&#x2022; The BOA-D-BiLSTM model proposed in this research work was assessed and compared with eight baseline models on two data sets. Ultimately, it was discovered that the BOA-D-BiLSTM model performs better than the eight considered baseline models in terms of the least evaluation error metrics such as RMSE (Root Mean Squared Error), MAE (Mean Absolute Error), R<sup>2</sup> (R-squared), and MSE (Mean Squared Error).</p>
</list-item>
</list>
</p>
<p>
<xref ref-type="sec" rid="s1">Section 1</xref> introduces the study, discusses the motivation and contributions, and highlights the proposed prediction model. <xref ref-type="sec" rid="s2">Section 2</xref> reviews related work concerning the prediction of GHI and the importance of hyperparameter fine-tuning. <xref ref-type="sec" rid="s3">Section 3</xref> presents the description and concepts of the proposed prediction model. <xref ref-type="sec" rid="s4">Section 4</xref> discusses the details of the experimental simulation. <xref ref-type="sec" rid="s5">Section 5</xref> discusses the results and comparative analysis. <xref ref-type="sec" rid="s6">Section 6</xref> draws the summarised conclusion of the proposed prediction model, limitations, and future research direction.</p>
</sec>
<sec id="s2">
<title>2 Brief of solar irradiance prediction related work</title>
<p>Researchers and other professionals are increasingly advocating integrating solar energy into smart grids to moderate the dependence on fossil fuels while simultaneously encouraging the ecological preservation of the environment. Thus, accurate and reliable global horizontal irradiance (GHI) forecasting is essential for predicting the prospective solar power era. The recurrent neural network with optimised hyperparameters performs better GHI prediction. Finding the optimal set of hyperparameters from all possible combinations is challenging if the number of hyperparameters is higher. The Bayesian optimisation algorithm can mitigate the LSTM hyperparameter optimisation problem. The hyperparameters of the GPR (Gaussian process regression) were optimised using Bayesian optimisation for the COVID-19 case forecasting (<xref ref-type="bibr" rid="B1">Alali et al., 2022</xref>). <xref ref-type="bibr" rid="B16">Mohammadi et al. (2015)</xref> predicted horizontal global solar radiation and used support vector regression (SVR). Two SVRs, the RBF (radial basis function) and the poly (polynomial basis function)&#x2014;were used to examine the performance of long-term observations for a city in a sunny region of Iran. Moreover, SVR-rf significantly qualified for HGSR prediction utilising n and N, outperforming SVR-poly on accuracy. The gradient-boosted regression tree (GBRT) model, suggested by <xref ref-type="bibr" rid="B18">Persson et al. (2017)</xref>, constitutes a nonparametric machine learning technique employed in multi-site solar power generation prediction over a forecast timeframe of 1&#x2013;6&#xa0;h. In contrast to GBRT model variants and single-site linear autoregressive models over all forecast horizons, the multi-site model showcases superior outcomes based on root mean squared error.</p>
<p>
<xref ref-type="bibr" rid="B21">Yu et al. (2019)</xref> In this analysis, short-term predictions are made using an LSTM-based technique based on a timeline that includes global horizontal irradiance (GHI) in Atlanta and Hawaii 1&#xa0;hour and 1&#xa0;day ahead. According to the daily forecast results, LSTM continues to be more accurate than other models and successfully boosts RNN. <xref ref-type="bibr" rid="B2">Benamrou et al. (2020)</xref> introduced a novel hybrid method for predicting GHI hourly in Al-Hoceima, Morocco. The lagged satellite-derived GHI encompassing the point of interest was determined to be the most pertinent feature for this purpose using a deep long- and short-term memory network, Xgboost, Random Forest, and Recursive Feature Elimination with cross-validation. The selected feature was input to the suggested model, and the Grid Search algorithm was used to select the best prediction model. <xref ref-type="bibr" rid="B8">Jumin et al. (2021)</xref> Based on data collected in Malaysia, the enhanced decision tree regression (BDTR) model was implemented to forecast variations in solar radiation. The suggested model was then contrasted with neural networks and linear regression. Sensitivity and uncertainty analysis was included to verify the truthfulness of the recommended model. <xref ref-type="bibr" rid="B20">Xiang et al. (2021)</xref> proposed a hybrid machine-learning algorithm to predict solar power accurately. The Persistence Extreme Learning Machine (P-ELM) algorithm trains the hybrid model, which performs more proficiently in short-term forecasting than the ELM algorithm.</p>
<p>
<xref ref-type="bibr" rid="B4">Bou-Rabee et al. (2022)</xref> To obtain precise solar irradiation predictions, a DL model that uses the attention mechanism employed in bidirectional long short term memory (BiLSTM) is built. The suggested model works on both sunny and cloudy days to provide better accuracy in various weather situations. <xref ref-type="bibr" rid="B12">Madhiarasan and Louzazni (2022)</xref> considered various influencing meteorological parameters as inputs for a novel Combined Long Short-Term Memory Networks (CLSTMN). The suggested prediction model is aggregated and compounded by many inputs associated with six distinct long short-term memory models built to increase the accuracy and generalisation of solar irradiance prediction. In <xref ref-type="bibr" rid="B19">Vijay and Saravanan (2022)</xref>, a Bayesian optimisation-based regression tree (BORT) machine learning technique was used to anticipate the values of global horizontal irradiance (GHI). The analysis of Bayesian optimisation&#x2019;s performance against grid and random search methods has demonstrated its validity. <xref ref-type="bibr" rid="B13">Medina-Santana et al. (2022)</xref> Two deep learning models (feed forward and recurrent neural networks) are utilised to forecast renewable sources over a long period for a site in Michoacan, Mexico, to address the uncertainty surrounding solar resources. <xref ref-type="bibr" rid="B14">Michael et al. (2022)</xref> By incorporating stacked LSTM layers, dropout architecture, and LSTM-based model, this work suggests an optimised stacked Bidirectional Long Short Term Memory (BiLSTM)/Long Short Term Memory (LSTM) model for predicting univariate and multivariate hourly time series data. The model&#x2019;s performance is improved using Bayesian optimisation to adjust six pertinent hyperparameters. 
The accuracy of the predicted results is equivalent for both GHI and POA (Plane of Array) when using real-world solar data from the Sweihan Photovoltaic Independent Power plant in Abu Dhabi, UAE, and NREL solar data for year-round data.</p>
<p>
<xref ref-type="bibr" rid="B10">Li et al. (2023)</xref> presented long- and short-term memory networks (BO-LSTM) with a Bayesian algorithm for short-term heat load forecasting. To remove noise from the data, apply the moving average data smoothing technique. The model&#x2019;s input is ascertained using Pearson&#x2019;s correlation analysis. <xref ref-type="bibr" rid="B5">Chodakowska et al. (2023)</xref> employed data from Poland and Jordan to examine autoregressive integrated moving average (ARIMA) models for the seasonal forecasting of solar radiation under various climatic situations. ARIMA models are used to anticipate solar radiation and may assist in ensuring that solar energy is steadily and firmly integrated into national systems. <xref ref-type="bibr" rid="B9">Krishnan et al. (2024)</xref> An ensemble model that uses gradient boosting was built and suggested for India&#x2019;s different climate zones for forecasting hourly global horizontal irradiance. The autoregressive integrated moving average (ARIMA), 2-layer feed-forward neural network, and long short-term memory (LSTM) are used as benchmarks for the gradient boost-based model; it is noticed an improved set of performance measures was attained by gradient boosting. <xref ref-type="bibr" rid="B7">Herrera-Casanova et al. (2024)</xref> deployed the Bayesian optimisation algorithm and BiLSTM in conjunction with bootstrap resampling for interval predictions to provide hour-ahead photovoltaic power prediction. A Bayesian optimisation algorithm (BOA) is used in conjunction with the BiLSTM model to optimise its primary hyperparameters and improve its prediction performance. The suggested model reduces the normalised mean absolute error (nMAE) compared to the MLP and RF comparison models.</p>
<p>The literature shows that identifying optimal hyperparameters in bidirectional long short term memory is crucial, complicated, time-consuming, and expensive. Predicting solar irradiance with accuracy has become much more crucial in light of the growing use of solar energy in power and energy systems globally. Several studies use ML or DL models for PV solar energy forecasting, as shown in the literature study. It is clear from the literature review that using hybrid models, which combine many basic models with optimal hyperparameters, improves prediction accuracy. Still, other research suggests extremely complicated models that require extended training. Although research indicates that hyperparameter tuning can improve prediction model accuracy, some studies carry out this modification by trial and error, taking a long period without ensuring optimal correspondence. With motivation from the existing literature, the BiLSTM optimal hyperparameter set is automatically fine-tuned and identified using the Bayesian optimisation algorithm applied for solar GHI prediction using the two India region datasets in this research work.</p>
</sec>
<sec id="s3">
<title>3 Proposed bayesian optimisation algorithm-based optimized deep bidirectional long short term memory (BOA-D-BiLSTM)</title>
<p>The proposed BOA-D-BiLSTM mathematical modelling and descriptions are detailed below:</p>
<sec id="s3-1">
<title>3.1 Deep bidirectional long short term memory network (deep BiLSTM)</title>
<p>The unique feature is the information process from the past to the future (forward flow) and from the future to the past (backward flow) (<xref ref-type="bibr" rid="B6">Graves and Schmidhuber, 2005</xref>). It consists of two LSTM layers, one for the forward information flow and the other for the backward information flow. BiLSTM can extract more complex relationships from the input sequence by considering historical and longitudinal data. By combining the two LSTM layers, the final output is obtained. The prediction may be required to be decided jointly by the previous and subsequent inputs. Improvement in prediction performance uses forward and backward transitions. Thus, making the network learn better and more feasible for real-time applications. Building the deep architecture model involves stacking more than one BiLSTM. <xref ref-type="fig" rid="F1">Figure 1</xref> shows the simplified BiLSTM model.</p>
<fig id="F1" position="float">
<label>FIGURE 1</label>
<caption>
<p>Simplified BiLSTM model.</p>
</caption>
<graphic xlink:href="fenrg-13-1499751-g001.tif"/>
</fig>
<p>BiLSTM simplified mathematical model representation in terms of matrix form.<disp-formula id="e1">
<mml:math id="m1">
<mml:mrow>
<mml:mrow>
<mml:mfenced open="[" close="]" separators="|">
<mml:mrow>
<mml:mtable columnalign="left">
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>F</mml:mi>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>I</mml:mi>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>C</mml:mi>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>O</mml:mi>
</mml:msub>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mo>&#x3d;</mml:mo>
<mml:mrow>
<mml:mfenced open="[" close="]" separators="|">
<mml:mrow>
<mml:mtable columnalign="left">
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>tan</mml:mi>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:mrow>
<mml:mfenced open="[" close="]" separators="|">
<mml:mrow>
<mml:mtable columnalign="left">
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mi>F</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mi>I</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mi>C</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mi>O</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:mrow>
<mml:mfenced open="[" close="]" separators="|">
<mml:mrow>
<mml:mtable columnalign="left">
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mrow>
<mml:mi>S</mml:mi>
<mml:mi>F</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mrow>
<mml:mi>S</mml:mi>
<mml:mi>I</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mrow>
<mml:mi>S</mml:mi>
<mml:mi>C</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mrow>
<mml:mi>S</mml:mi>
<mml:mi>O</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:msub>
<mml:mi>S</mml:mi>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:mrow>
<mml:mfenced open="[" close="]" separators="|">
<mml:mrow>
<mml:mtable columnalign="left">
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mi>F</mml:mi>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mi>I</mml:mi>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mi>C</mml:mi>
</mml:msub>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mi>O</mml:mi>
</mml:msub>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(1)</label>
</disp-formula>
</p>
<p>Mathematical model of BiLSTM and gate updating<disp-formula id="e2">
<mml:math id="m2">
<mml:mrow>
<mml:mtext>Forget&#x2009;Gate</mml:mtext>
<mml:mo>,</mml:mo>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>F</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mi>F</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mrow>
<mml:mi>S</mml:mi>
<mml:mi>F</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>S</mml:mi>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mi>F</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(2)</label>
</disp-formula>
<disp-formula id="e3">
<mml:math id="m3">
<mml:mrow>
<mml:mtext>Input&#x2009;Gate</mml:mtext>
<mml:mo>,</mml:mo>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>I</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mi>I</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mrow>
<mml:mi>S</mml:mi>
<mml:mi>I</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>S</mml:mi>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mi>I</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(3)</label>
</disp-formula>
<disp-formula id="e4">
<mml:math id="m4">
<mml:mrow>
<mml:mtext>Cell&#x2009;agent&#x2009;</mml:mtext>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>C</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>tan</mml:mi>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mi>C</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mrow>
<mml:mi>S</mml:mi>
<mml:mi>C</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>S</mml:mi>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mi>C</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(4)</label>
</disp-formula>
<disp-formula id="e5">
<mml:math id="m5">
<mml:mrow>
<mml:mtext>Output&#x2009;Gate</mml:mtext>
<mml:mo>,</mml:mo>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>O</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mi>O</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mrow>
<mml:mi>S</mml:mi>
<mml:mi>O</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>S</mml:mi>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mi>O</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(5)</label>
</disp-formula>
<disp-formula id="e6">
<mml:math id="m6">
<mml:mrow>
<mml:mtext>Hidden&#x2009;State&#x2009;</mml:mtext>
<mml:msub>
<mml:mi>Q</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>O</mml:mi>
</mml:msub>
<mml:mo>&#x2218;</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>tanh</mml:mi>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(6)</label>
</disp-formula>
<disp-formula id="e7">
<mml:math id="m7">
<mml:mrow>
<mml:mtext>Cell&#x2009;State&#x2009;</mml:mtext>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:mtext>Memory&#x2009;cell&#x2009;output</mml:mtext>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mtext>&#x2009;</mml:mtext>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>F</mml:mi>
</mml:msub>
<mml:mo>&#x2218;</mml:mo>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>I</mml:mi>
</mml:msub>
<mml:mo>&#x2218;</mml:mo>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>C</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
<label>(7)</label>
</disp-formula>
</p>
<p>The BiLSTM output vector is given by<disp-formula id="e8">
<mml:math id="m8">
<mml:mrow>
<mml:msub>
<mml:mi>P</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mover accent="true">
<mml:mi>S</mml:mi>
<mml:mo>&#x2192;</mml:mo>
</mml:mover>
<mml:mi>P</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x2192;</mml:mo>
</mml:mover>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mover accent="true">
<mml:mi>S</mml:mi>
<mml:mo>&#x2190;</mml:mo>
</mml:mover>
<mml:mi>P</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x20d6;</mml:mo>
</mml:mover>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mi>P</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
<label>(8)</label>
</disp-formula>
<disp-formula id="e9">
<mml:math id="m9">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x2192;</mml:mo>
</mml:mover>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mover accent="true">
<mml:mi>S</mml:mi>
<mml:mo>&#x2192;</mml:mo>
</mml:mover>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mover accent="true">
<mml:mi>S</mml:mi>
<mml:mo>&#x2192;</mml:mo>
</mml:mover>
<mml:mover accent="true">
<mml:mi>S</mml:mi>
<mml:mo>&#x2192;</mml:mo>
</mml:mover>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x2192;</mml:mo>
</mml:mover>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x2192;</mml:mo>
</mml:mover>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(9)</label>
</disp-formula>
<disp-formula id="e10">
<mml:math id="m10">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x20d6;</mml:mo>
</mml:mover>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:msub>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mi>R</mml:mi>
<mml:mover accent="true">
<mml:mi>S</mml:mi>
<mml:mo>&#x2190;</mml:mo>
</mml:mover>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mrow>
<mml:mover accent="true">
<mml:mi>S</mml:mi>
<mml:mo>&#x2190;</mml:mo>
</mml:mover>
<mml:mover accent="true">
<mml:mi>S</mml:mi>
<mml:mo>&#x2190;</mml:mo>
</mml:mover>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x2190;</mml:mo>
</mml:mover>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
<mml:mo>&#x2b;</mml:mo>
<mml:msub>
<mml:mi>B</mml:mi>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x2190;</mml:mo>
</mml:mover>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(10)</label>
</disp-formula>
</p>
<p>Where, <inline-formula id="inf1">
<mml:math id="m11">
<mml:mrow>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>- Sigmoidal activation function for gates, <inline-formula id="inf2">
<mml:math id="m12">
<mml:mrow>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>tanh</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>- Hyperbolic tangent activation function, <inline-formula id="inf3">
<mml:math id="m13">
<mml:mrow>
<mml:mo>&#x2218;</mml:mo>
</mml:mrow>
</mml:math>
</inline-formula>- Hadamard Product (element-wise product), <inline-formula id="inf4">
<mml:math id="m14">
<mml:mrow>
<mml:mi>V</mml:mi>
</mml:mrow>
</mml:math>
</inline-formula>-weighted connection of the gates, respectively forget gate, input gate, cell gate and output gate, <inline-formula id="inf5">
<mml:math id="m15">
<mml:mrow>
<mml:mi>W</mml:mi>
</mml:mrow>
</mml:math>
</inline-formula>-weighted connection between the output state to the input state cell, <inline-formula id="inf6">
<mml:math id="m16">
<mml:mrow>
<mml:msub>
<mml:mi>S</mml:mi>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> -time stamp n-1 past hidden state output, <inline-formula id="inf7">
<mml:math id="m17">
<mml:mrow>
<mml:msub>
<mml:mi>Q</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>- time stamp n hidden state, <inline-formula id="inf8">
<mml:math id="m18">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> - current time stamp input, <inline-formula id="inf9">
<mml:math id="m19">
<mml:mrow>
<mml:mi>B</mml:mi>
</mml:mrow>
</mml:math>
</inline-formula> - bias of the respective gates, <inline-formula id="inf10">
<mml:math id="m20">
<mml:mrow>
<mml:msub>
<mml:mi>G</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>- Cell state, <inline-formula id="inf11">
<mml:math id="m21">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x2192;</mml:mo>
</mml:mover>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>-forward flow information, <inline-formula id="inf12">
<mml:math id="m22">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x20d6;</mml:mo>
</mml:mover>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>-backward flow information, <inline-formula id="inf13">
<mml:math id="m23">
<mml:mrow>
<mml:msub>
<mml:mi>P</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>- Output Vector.</p>
<p>Selecting an activation function <inline-formula id="inf14">
<mml:math id="m24">
<mml:mrow>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mi>s</mml:mi>
<mml:mi>i</mml:mi>
<mml:mi>g</mml:mi>
</mml:mrow>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> and <inline-formula id="inf15">
<mml:math id="m25">
<mml:mrow>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>tanh</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> is essential for managing the non-linearity of the output. The forward LSTM <inline-formula id="inf16">
<mml:math id="m26">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x2192;</mml:mo>
</mml:mover>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> has access to historical data, but the backward LSTM <inline-formula id="inf17">
<mml:math id="m27">
<mml:mrow>
<mml:msub>
<mml:mover accent="true">
<mml:mi>Q</mml:mi>
<mml:mo>&#x20d6;</mml:mo>
</mml:mover>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> processes the sequence in reverse and thus has access to future context at time step n. The model may produce predictions based on both temporal contexts by combining these hidden states. The BiLSTM may determine dependencies from both directions in the sequence due to this bidirectional structure. The degree to which the input affects the current hidden state is determined by this <inline-formula id="inf18">
<mml:math id="m28">
<mml:mrow>
<mml:mi>V</mml:mi>
</mml:mrow>
</mml:math>
</inline-formula> matrix. The impact of the hidden state from the previous time step on the present one is determined by this <inline-formula id="inf19">
<mml:math id="m29">
<mml:mrow>
<mml:mi>W</mml:mi>
</mml:mrow>
</mml:math>
</inline-formula> matrix. The output layer&#x2019;s bias term <inline-formula id="inf20">
<mml:math id="m30">
<mml:mrow>
<mml:mi>B</mml:mi>
</mml:mrow>
</mml:math>
</inline-formula> enables the final prediction to change by the patterns the model has learned. It considers the context of the future. The model may generate predictions based on both temporal contexts by combining these hidden states. <xref ref-type="disp-formula" rid="e1">Equations 1</xref>&#x2013;<xref ref-type="disp-formula" rid="e10">10</xref> state the mathematical model of BiLSTM.</p>
</sec>
<sec id="s3-2">
<title>3.2 Bayesian optimisation algorithm</title>
<p>The Bayesian optimisation algorithm solves difficulties by finding the optimal parameters that minimise the objective function in a finite area, with lower and upper bounds on each variable, as given by <xref ref-type="disp-formula" rid="e11">Equation 11</xref> (<xref ref-type="bibr" rid="B17">Pelikan et al., 1999</xref>).<disp-formula id="e11">
<mml:math id="m31">
<mml:mrow>
<mml:msup>
<mml:mi>r</mml:mi>
<mml:mo>&#x2a;</mml:mo>
</mml:msup>
<mml:mo>&#x3d;</mml:mo>
<mml:munder>
<mml:mi>argmin</mml:mi>
<mml:mrow>
<mml:mi>r</mml:mi>
<mml:mo>&#x2208;</mml:mo>
<mml:mi>R</mml:mi>
</mml:mrow>
</mml:munder>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:mi>r</mml:mi>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(11)</label>
</disp-formula>
</p>
<p>Where, <inline-formula id="inf21">
<mml:math id="m32">
<mml:mrow>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:mi>r</mml:mi>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
</inline-formula>- a score that should be minimised, R - the domain of the hyperparameter values, and r&#x2a; - the combination of hyperparameters that yields the lowest value of the score <inline-formula id="inf22">
<mml:math id="m33">
<mml:mrow>
<mml:mi>f</mml:mi>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:mi>r</mml:mi>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
</inline-formula>.</p>
<p>Two fundamental elements constitute the Bayesian Optimisation Algorithm (BOA):<list list-type="simple">
<list-item>
<p>&#x2022; Probabilistic (surrogate) framework: Bayes&#x2019; theorem serves as the basis for BOA, which approximates the objective function via a surrogate framework that is efficiently sampled in each iteration. A Gaussian process serves as the best stand-in framework for choosing a desirable group of hyperparameters to assess the actual objective function. The objective function is determined using a surrogate framework and utilised to direct upcoming sampling.</p>
</list-item>
<list-item>
<p>&#x2022; Acquisition function: The acquisition function intends to discover the most promising group of hyperparameters that should be assessed next by using Bayesian knowledge to determine the optimal observation point in each iteration and propose a new sampling point. The processes of exploration and exploitation are balanced through the acquisition function. Exploitation concentrates on areas of the search space that have a greater chance of yielding an improved solution based on the present surrogate framework, whereas exploration pursues less-explored areas of the search space. The Bayesian Optimisation Algorithm Pseudocode is shown in <xref ref-type="table" rid="T1">Table 1</xref>.</p>
</list-item>
</list>
</p>
<table-wrap id="T1" position="float">
<label>TABLE 1</label>
<caption>
<p>Bayesian optimisation algorithm pseudocode.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="left">Bayesian optimisation algorithm</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="left">&#x2003;1. Commence: The objective function is to minimise error and identify the optimal hyperparameters of deep BiLSTM.<break/>&#x2003;2. Randomly choose the initial samples<break/>&#x2003;3. Build the surrogate framework and acquisition function to calculate fitness concerning the considered samples<break/>&#x2003;4. Perform the looping operation<break/>&#x2003;&#x2003;1. Introduce the acquisition function to supplement the additional samples<break/>&#x2003;&#x2003;2. Compute the surrogate<break/>&#x2003;&#x2003;3. Update the surrogate framework to provide a posterior framework<break/>&#x2003;5. Terminate</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s3-3">
<title>3.3 Proposed BOA-D-BiLSTM</title>
<p>This section is devoted to describing the proposed framework. Deep BiLSTM hyperparameter tweaking is significant in achieving the best model performance. The disadvantages of conventional optimisation strategies such as grid and random search are that they take a long time for larger parameter sets and do not always provide the best finding, respectively. In contrast, the Bayesian optimisation algorithm (BOA) is a well-informed method that evaluates simply the most promising models using a surrogate framework. With fewer sampling points and a quicker computing time, the Bayes theorem is applied to construct the posterior distribution of the objective function.</p>
<p>To enhance the long-term prediction of GHI, the best hyperparameters for deep BiLSTM, such as its number of hidden layers, hidden nodes, learning rate, regularisation, and other significant hyperparameters, were found using the Bayesian optimisation method (BOA). The Bayesian optimisation algorithm was used to optimise Deep BiLSTM concerning the optimal and promising hyperparameter and the performance evaluated on the two datasets for the GHI prediction in the long-term horizon. <xref ref-type="fig" rid="F2">Figure 2</xref> illustrates the proposed BOA-D-BiLSTM prediction model workflow and model built according to the set values and parameters presented in <xref ref-type="table" rid="T2">Table 2</xref>. The author uses 10 times the number of hyperparameters optimised as the initial samples and then processed with complete samples. The convergence is determined by the minimisation of the MSE.</p>
<fig id="F2" position="float">
<label>FIGURE 2</label>
<caption>
<p>The proposed workflow of the BOA-D-BiLSTM prediction model.</p>
</caption>
<graphic xlink:href="fenrg-13-1499751-g002.tif"/>
</fig>
<table-wrap id="T2" position="float">
<label>TABLE 2</label>
<caption>
<p>Range set of hyperparameters for the Deep BiLSTM and Bayesian optimisation algorithm parameters.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="left">Deep BiLSTM hyperparameters</th>
<th align="left">Range sets</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="left">Number of Stack</td>
<td align="left">1 to 3</td>
</tr>
<tr>
<td align="left">Number of Layers</td>
<td align="left">1 to 4</td>
</tr>
<tr>
<td align="left">Number of hidden nodes</td>
<td align="left">1 to 300</td>
</tr>
<tr>
<td align="left">Batch size</td>
<td align="left">16 to 128</td>
</tr>
<tr>
<td align="left">Dropout</td>
<td align="left">0.1 to 0.5</td>
</tr>
<tr>
<td align="left">Activation function</td>
<td align="left">&#x2018;relu&#x2019;, &#x2018;tanh&#x2019;, &#x2018;sigmoid&#x27;</td>
</tr>
<tr>
<td align="left">Optimizer</td>
<td align="left">&#x2018;adam&#x2019;, &#x2018;rmsprop&#x2019;, &#x2018;sgd&#x27;</td>
</tr>
<tr>
<td align="left">Initial Learning Rate</td>
<td align="left">1e-5 to 1e-2</td>
</tr>
<tr>
<td align="left">L2 Regularization</td>
<td align="left">1e-15 to 1e-5</td>
</tr>
<tr>
<td align="left">Epoch</td>
<td align="left">10 to 300</td>
</tr>
</tbody>
</table>
<table>
<thead>
<tr>
<th align="left">Bayesian Optimisation Algorithm</th>
<th align="left">Parameters</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">Objective function</td>
<td align="left">Minimisation of MSE</td>
</tr>
<tr>
<td align="left">Iterations</td>
<td align="left">200</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
</sec>
<sec id="s4">
<title>4 Experimental simulation details</title>
<p>The proposed BOA-D-BiLSTM prediction model and other models from the literature are implemented on an HP laptop with an AMD Ryzen 5 3550H processor, 8&#xa0;GB RAM at 2,100&#xa0;MHz, and a 4&#xa0;GB GPU. The proposed Deep BiLSTM hyperparameters, namely, the number of stacks, number of hidden layers, number of hidden nodes, learning rate, dropout rate, batch size, optimiser, activation function, and L2 regularisation, are optimally fixed using the Bayesian optimisation algorithm. The prediction model built with the optimal hyperparameters identified by the Bayesian optimisation algorithm is assessed on the India region&#x2019;s two datasets (Datasets 1 and 2).</p>
<sec id="s4-1">
<title>4.1 Data source</title>
<p>Environmental variables such as temperature, wind speed, pressure, relative humidity, cloud cover, precipitation of water content, wind direction, dew point, and solar irradiance (GHI and Direct Normal Irradiance (DNI)) are closely related to solar energy fluctuations. <xref ref-type="table" rid="T3">Table 3</xref> shows the statistics of the dataset. This research experiment used data from two regions of India, namely, Chennai and Jammu. We used two datasets for the Chennai and Jammu locations, each containing 10&#xa0;years of GHI and other environmental variables, for our investigation and simulation evaluation. Each meteorological parameter (Dataset 1 and Dataset 2) comprises hourly collected data encompassing 10&#xa0;years from 01 January 2013 to 31 December 2022, obtained from the NOAA (National Oceanic and Atmospheric Administration).</p>
<table-wrap id="T3" position="float">
<label>TABLE 3</label>
<caption>
<p>Dataset statistics.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th align="left">Input variables</th>
<th align="left">Data size</th>
<th align="center">Range</th>
<th align="center">Units</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="left">Temperature</td>
<td align="left">87,600</td>
<td align="left">8&#x2013;45</td>
<td align="left">(&#xb0;C)</td>
</tr>
<tr>
<td align="left">Wind speed</td>
<td align="left">87,600</td>
<td align="left">3&#x2013;12</td>
<td align="left">(m/s)</td>
</tr>
<tr>
<td align="left">Pressure</td>
<td align="left">87,600</td>
<td align="left">990&#x2013;1,017</td>
<td align="left">(mbar)</td>
</tr>
<tr>
<td align="left">Relative humidity</td>
<td align="left">87,600</td>
<td align="left">65&#x2013;100</td>
<td align="left">(%)</td>
</tr>
<tr>
<td align="left">Cloud cover</td>
<td align="left">87,600</td>
<td align="left">0&#x2013;11</td>
<td align="left">(oktas)</td>
</tr>
<tr>
<td align="left">Precipitation of water content</td>
<td align="left">87,600</td>
<td align="left">0&#x2013;98</td>
<td align="left">(%)</td>
</tr>
<tr>
<td align="left">Wind direction</td>
<td align="left">87,600</td>
<td align="left">0&#x2013;360</td>
<td align="left">(Degree)</td>
</tr>
<tr>
<td align="left">Dew point</td>
<td align="left">87,600</td>
<td align="left">18&#x2013;41</td>
<td align="left">(&#xb0;C)</td>
</tr>
<tr>
<td align="left">GHI</td>
<td align="left">87,600</td>
<td align="left">0&#x2013;1,031</td>
<td align="left">(W/m<sup>2</sup>)</td>
</tr>
<tr>
<td align="left">DNI</td>
<td align="left">87,600</td>
<td align="left">0&#x2013;1,200</td>
<td align="left">(W/m<sup>2</sup>)</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s4-2">
<title>4.2 Normalisation</title>
<p>In this investigation, the collected real-time data was processed in the range (0, 1) using the Min-Max normalisation approach, as represented by the <xref ref-type="disp-formula" rid="e12">Equation 12</xref>.<disp-formula id="e12">
<mml:math id="m34">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x3d;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mn>0</mml:mn>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>min</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>max</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>min</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:math>
<label>(12)</label>
</disp-formula>
</p>
<p>Where, <inline-formula id="inf23">
<mml:math id="m35">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> is the value of R after normalisation, <inline-formula id="inf24">
<mml:math id="m36">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mn>0</mml:mn>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> is the current value for variable R, <inline-formula id="inf25">
<mml:math id="m37">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>min</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> and <inline-formula id="inf26">
<mml:math id="m38">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>max</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> are the minimum and maximum data points in the variable R of the input dataset.</p>
</sec>
<sec id="s4-3">
<title>4.3 Training and testing data sets</title>
<p>The proposed models possess the ten input nodes: GHI, DNI, temperature, wind speed, pressure, relative humidity, cloud cover, precipitation of water content, wind direction, and dew point. The predicted GHI is the output node. The two collected regional datasets (1 and 2) consist of 87,600 data points for each considered input, which are separated into training and testing sets based on the ratio 70:30, respectively.</p>
</sec>
<sec id="s4-4">
<title>4.4 Evaluation error metrics</title>
<p>This paper used RMSE (Root Mean Squared Error), R<sup>2</sup> (R-squared), MSE (Mean Squared Error) and MAE (Mean Absolute Error) as evaluation error metrics to evaluate the proposed Bayesian Optimisation algorithm-based Optimised Deep Bidirectional Long Short-Term Memory (BOA-D-BiLSTM) and other baseline prediction model performance. <xref ref-type="disp-formula" rid="e13">Equations 13</xref>&#x2013;<xref ref-type="disp-formula" rid="e16">16</xref> state the evaluation metric formulations.<disp-formula id="e13">
<mml:math id="m39">
<mml:mrow>
<mml:mtext>RMSE</mml:mtext>
<mml:mo>&#x3d;</mml:mo>
<mml:msqrt>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:mfrac>
<mml:mrow>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mrow>
<mml:mi>N</mml:mi>
</mml:mrow>
</mml:mfrac>
<mml:mstyle displaystyle="true">
<mml:munderover>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:munderover>
</mml:mstyle>
<mml:msup>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>P</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:msqrt>
</mml:mrow>
</mml:math>
<label>(13)</label>
</disp-formula>
<disp-formula id="e14">
<mml:math id="m40">
<mml:mrow>
<mml:mtext>MAE</mml:mtext>
<mml:mo>&#x3d;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mrow>
<mml:mi>N</mml:mi>
</mml:mrow>
</mml:mfrac>
<mml:mrow>
<mml:mstyle displaystyle="true">
<mml:munderover>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:munderover>
</mml:mstyle>
<mml:mrow>
<mml:mfenced open="|" close="|" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>P</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(14)</label>
</disp-formula>
<disp-formula id="e15">
<mml:math id="m41">
<mml:mrow>
<mml:mtext>MSE</mml:mtext>
<mml:mo>&#x3d;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mrow>
<mml:mi>N</mml:mi>
</mml:mrow>
</mml:mfrac>
<mml:mstyle displaystyle="true">
<mml:munderover>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:munderover>
</mml:mstyle>
<mml:msup>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>P</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:math>
<label>(15)</label>
</disp-formula>
<disp-formula id="e16">
<mml:math id="m42">
<mml:mrow>
<mml:msup>
<mml:mi mathvariant="normal">R</mml:mi>
<mml:mn>2</mml:mn>
</mml:msup>
<mml:mo>&#x3d;</mml:mo>
<mml:mrow>
<mml:mfenced open="[" close="]" separators="|">
<mml:mrow>
<mml:mn>1</mml:mn>
<mml:mo>&#x2212;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mstyle displaystyle="true">
<mml:munderover>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:munderover>
</mml:mstyle>
<mml:msup>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>P</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
<mml:mrow>
<mml:mstyle displaystyle="true">
<mml:munderover>
<mml:mo>&#x2211;</mml:mo>
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x3d;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:munderover>
</mml:mstyle>
<mml:msup>
<mml:mrow>
<mml:mfenced open="(" close=")" separators="|">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:mover accent="true">
<mml:mi>R</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
<mml:mn>2</mml:mn>
</mml:msup>
</mml:mrow>
</mml:mfrac>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mrow>
</mml:math>
<label>(16)</label>
</disp-formula>
</p>
<p>Let, N - the total number of the samples, <inline-formula id="inf27">
<mml:math id="m43">
<mml:mrow>
<mml:msub>
<mml:mi>R</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula> - the real target GHI, <inline-formula id="inf28">
<mml:math id="m44">
<mml:mrow>
<mml:mover accent="true">
<mml:mi>R</mml:mi>
<mml:mo>&#xaf;</mml:mo>
</mml:mover>
</mml:mrow>
</mml:math>
</inline-formula>- the mean target GHI, and <inline-formula id="inf29">
<mml:math id="m45">
<mml:mrow>
<mml:msub>
<mml:mi>P</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
</mml:mrow>
</mml:math>
</inline-formula>- the predicted GHI.</p>
</sec>
</sec>
<sec sec-type="results|discussion" id="s5">
<title>5 Results and discussion</title>
<p>Global solar irradiance (GHI) prediction is a vital problem in photovoltaic systems for vendors and power system engineers. The proposed BOA-D-BiLSTM and other baseline models were simulated on MATLAB R2024a. The detailed proposed BOA-D-BiLSTM prediction model result analysis and discussion regarding traditional optimisation methods (grid search and Random search) and comparative result assessment with the other eight baseline models are presented as follows.</p>
<sec id="s5-1">
<title>5.1 Comparative analysis with other traditional optimisation methods for long-term prediction of global horizontal irradiance</title>
<p>Grid and random search are the traditional methods used to identify the optimal parameter sets and comparative analysis. In the context of the two data sets from the India region, this research constitutes a deep bidirectional long short term memory network that is resilient and optimised for long-term global horizontal radiation (GHI) prediction. The results achieved are tabulated in <xref ref-type="table" rid="T4">Table 4</xref>. From experimentation observation, it was noticed that grid searches are notoriously time-consuming. The trial-and-error method results in uncertain efficiency, and the grid search is exorbitant in computational complexity. In order to optimally use the network&#x2019;s capability, hyperparameters must be appropriately configured. Grid Search and Random Search (RS) are common approaches for hyperparameter optimisation. Still, they are computationally costly, may take a long time to analyse, may result in inappropriate hyperparameters, and produce an extensive variance during computation. The advantage of Bayesian optimisation algorithms is that they avoid becoming trapped in a local optimum.</p>
<table-wrap id="T4" position="float">
<label>TABLE 4</label>
<caption>
<p>Proposed Deep BiLSTM Optimised with Bayesian optimisation algorithm comparative analysis with other traditional optimisation methods.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th rowspan="2" align="center">Deep BiLSTM with optimisation methods</th>
<th rowspan="2" align="center">Datasets</th>
<th colspan="4" align="center">Evaluation error metrics</th>
</tr>
<tr>
<th align="left">RMSE</th>
<th align="left">MAE</th>
<th align="center">MSE</th>
<th align="center">R<sup>2</sup>
</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td rowspan="2" align="left">Grid Search (<xref ref-type="bibr" rid="B3">Bergstra and Bengio, 2012</xref>)</td>
<td align="left">Dataset 1</td>
<td align="left">0.2037</td>
<td align="left">0.1424</td>
<td align="left">0.0415</td>
<td align="left">0.9534</td>
</tr>
<tr>
<td align="left">Dataset 2</td>
<td align="left">0.2869</td>
<td align="left">0.1842</td>
<td align="left">0.0823</td>
<td align="left">0.9401</td>
</tr>
<tr>
<td rowspan="2" align="left">Random Search (<xref ref-type="bibr" rid="B3">Bergstra and Bengio, 2012</xref>)</td>
<td align="left">Dataset 1</td>
<td align="left">0.0063</td>
<td align="left">0.0047</td>
<td align="left">3.9543 &#xd7; 10<sup>&#x2212;05</sup>
</td>
<td align="left">0.9854</td>
</tr>
<tr>
<td align="left">Dataset 2</td>
<td align="left">0.0097</td>
<td align="left">0.0070</td>
<td align="left">9.4263 &#xd7; 10<sup>&#x2212;05</sup>
</td>
<td align="left">0.9810</td>
</tr>
<tr>
<td rowspan="2" align="left">Bayesian Optimisation Algorithm</td>
<td align="left">Dataset 1</td>
<td align="left">
<bold>0.0026</bold>
</td>
<td align="left">
<bold>0.0016</bold>
</td>
<td align="left">
<bold>6.6852 &#xd7; 10</bold>
<sup>
<bold>&#x2212;06</bold>
</sup>
</td>
<td align="left">
<bold>0.9994</bold>
</td>
</tr>
<tr>
<td align="left">Dataset 2</td>
<td align="left">
<bold>0.0030</bold>
</td>
<td align="left">
<bold>0.0018</bold>
</td>
<td align="left">
<bold>8.8628 &#xd7; 10</bold>
<sup>
<bold>&#x2212;06</bold>
</sup>
</td>
<td align="left">
<bold>0.9988</bold>
</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>The bold implies the best result.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<p>The Bayesian optimisation algorithm optimises the Deep BiLSTM hyperparameters to overcome the traditional optimisation limitations. The findings suggest that the models&#x2019; GHI prediction capabilities are considerably enhanced by adjusting the hyperparameters using random search, grid search, and Bayesian optimisation algorithms. The experimental results show that, compared to grid search, the Deep BiLSTM with hyperparameters identified by random search performs better but does not surpass the BiLSTM prediction model with hyperparameters identified by the Bayesian optimisation algorithm for the long-term solar GHI prediction application. Random search-based BiLSTM achieves lower RMSE, MAE, and MSE and a higher R<sup>2</sup> than grid search-based BiLSTM, but it is not supreme compared to Bayesian optimisation algorithm-based BiLSTM. As the number of hyperparameters rises, grid search algorithms become less effective, and computation time complexity becomes problematic. The approach employs the Bayesian optimisation algorithm to discover the optimal combination in an acceptable time. Moreover, the prediction performance of BiLSTM is greatly enhanced by tweaking hyperparameters during training and testing. With this, it was concluded that the BiLSTM with hyperparameters optimised by the proposed Bayesian optimisation algorithm outperforms in terms of better performance and faster convergence. The Bayesian optimisation algorithm outperforms grid search and random search because it can intelligently navigate complex, high-dimensional parameter spaces with few iterations. Therefore, the authors considered the Bayesian optimisation algorithm accurate and simple compared to the traditional hyperparameter fine-tuning method, which was clearly inferred from <xref ref-type="fig" rid="F3">Figure 3</xref>. 
The significant hyperparameters identified by the Bayesian optimisation algorithm are tabulated in <xref ref-type="table" rid="T5">Table 5</xref>. Finding the best set of parameters in an acceptable amount of time may be accomplished through hyperparameter tweaking using Bayesian optimisation. Additionally, it helps to reduce model overfitting problems.</p>
<fig id="F3" position="float">
<label>FIGURE 3</label>
<caption>
<p>Comparative analysis of the 3D column of proposed BOA-D-BiLSTM with other traditional optimisation methods.</p>
</caption>
<graphic xlink:href="fenrg-13-1499751-g003.tif"/>
</fig>
<table-wrap id="T5" position="float">
<label>TABLE 5</label>
<caption>
<p>The proposed Bayesian optimisation algorithm identified the optimal significant hyperparameter of the Deep BiLSTM.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th rowspan="2" align="left">Datasets</th>
<th colspan="8" align="center">Identified optimal significant hyperparameters</th>
</tr>
<tr>
<th align="left">Number of BiLSTM stacks</th>
<th align="left">BiLSTM hidden layer</th>
<th align="left">Number of nodes</th>
<th align="left">Initial learning rate</th>
<th align="left">Dropout rate</th>
<th align="left">Optimizer</th>
<th align="left">Batch size</th>
<th align="left">L2 regularization</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td align="left">Dataset 1</td>
<td align="left">2</td>
<td align="left">2</td>
<td align="left">50, 41</td>
<td align="left">0.01015</td>
<td align="left">0.3</td>
<td align="left">Adam</td>
<td align="left">32</td>
<td align="left">1.1858 &#xd7; 10<sup>&#x2212;06</sup>
</td>
</tr>
<tr>
<td align="left">Dataset 2</td>
<td align="left">2</td>
<td align="left">2</td>
<td align="left">84, 28</td>
<td align="left">0.010034</td>
<td align="left">0.3</td>
<td align="left">Adam</td>
<td align="left">32</td>
<td align="left">9.0466 &#xd7; 10<sup>&#x2212;10</sup>
</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="s5-2">
<title>5.2 Comparative analysis of proposed BOA-D-BiLSTM with other baseline models</title>
<p>In existing global horizontal irradiance prediction research, many prediction models were suggested using baseline models such as the persistence model, ARIMA (Autoregressive integrated moving average), RNN (recurrent neural network), BPN (backpropagation neural network), SVR (Support Vector Regression), Boosted Tree, LSTM (Long Short Term Memory) and BiLSTM (Bidirectional Long Short Term Memory). All these models predict values based on the present and past information, and the selection of hyperparameters is not optimal. Still, in real-time applications, consideration of future information along with past and present information, together with neural networks associated with optimal hyperparameters, leads to improved prediction performance. To address this issue, the paper uses the deep Bi-LSTM with the Bayesian optimisation algorithm-based optimised hyperparameters to predict the GHI in the long-term horizon time scale. The results of the proposed BOA-D-BiLSTM investigations are examined in the configuration as per <xref ref-type="table" rid="T5">Table 5</xref>; listed hyperparameters and other parameters of the baseline model are considered as per the literature but evaluated on the India region. The results attained are tabulated in <xref ref-type="table" rid="T6">Table 6</xref>.</p>
<table-wrap id="T6" position="float">
<label>TABLE 6</label>
<caption>
<p>Proposed Bayesian Optimisation algorithm-based Optimised Deep Bidirectional Long Short Term Memory (BOA-D-BiLSTM) two datasets based on comparative analysis with the baseline prediction models.</p>
</caption>
<table>
<thead valign="top">
<tr>
<th rowspan="2" align="center">Prediction models</th>
<th rowspan="2" align="center">Dataset</th>
<th colspan="4" align="center">Evaluation error metric</th>
</tr>
<tr>
<th align="center">RMSE</th>
<th align="center">MAE</th>
<th align="center">MSE</th>
<th align="center">R<sup>2</sup>
</th>
</tr>
</thead>
<tbody valign="top">
<tr>
<td rowspan="2" align="left">Persistence (<xref ref-type="bibr" rid="B20">Xiang et al., 2021</xref>)</td>
<td align="left">1</td>
<td align="left">23.9313</td>
<td align="left">22.4023</td>
<td align="left">572.7094</td>
<td align="left">&#x2212;10.2399</td>
</tr>
<tr>
<td align="left">2</td>
<td align="left">20.3610</td>
<td align="left">17.7686</td>
<td align="left">414.5708</td>
<td align="left">&#x2212;9.9018</td>
</tr>
<tr>
<td rowspan="2" align="left">ARIMA (<xref ref-type="bibr" rid="B5">Chodakowska et al., 2023</xref>)</td>
<td align="left">1</td>
<td align="left">0.4463</td>
<td align="left">0.4086</td>
<td align="left">0.1991</td>
<td align="left">&#x2212;1.0824</td>
</tr>
<tr>
<td align="left">2</td>
<td align="left">0.2565</td>
<td align="left">0.1827</td>
<td align="left">0.0658</td>
<td align="left">&#x2212;0.0592</td>
</tr>
<tr>
<td rowspan="2" align="left">BPN (<xref ref-type="bibr" rid="B11">Madhiarasan, 2018</xref>)</td>
<td align="left">1</td>
<td align="left">0.5579</td>
<td align="left">0.5016</td>
<td align="left">0.3113</td>
<td align="left">&#x2212;2.2549</td>
</tr>
<tr>
<td align="left">2</td>
<td align="left">0.2508</td>
<td align="left">0.1908</td>
<td align="left">0.0629</td>
<td align="left">&#x2212;0.0123</td>
</tr>
<tr>
<td rowspan="2" align="left">RNN (<xref ref-type="bibr" rid="B21">Yu et al., 2019</xref>)</td>
<td align="left">1</td>
<td align="left">0.0714</td>
<td align="left">0.0547</td>
<td align="left">0.0051</td>
<td align="left">0.9496</td>
</tr>
<tr>
<td align="left">2</td>
<td align="left">0.0819</td>
<td align="left">0.0591</td>
<td align="left">0.0067</td>
<td align="left">0.9402</td>
</tr>
<tr>
<td rowspan="2" align="left">SVR (<xref ref-type="bibr" rid="B16">Mohammadi et al., 2015</xref>)</td>
<td align="left">1</td>
<td align="left">0.0742</td>
<td align="left">0.0576</td>
<td align="left">0.0055</td>
<td align="left">0.9424</td>
</tr>
<tr>
<td align="left">2</td>
<td align="left">0.0849</td>
<td align="left">0.0708</td>
<td align="left">0.0072</td>
<td align="left">0.8827</td>
</tr>
<tr>
<td rowspan="2" align="left">Boosted Tree (<xref ref-type="bibr" rid="B18">Persson et al., 2017</xref>)</td>
<td align="left">1</td>
<td align="left">0.0592</td>
<td align="left">0.0323</td>
<td align="left">0.0035</td>
<td align="left">0.9628</td>
</tr>
<tr>
<td align="left">2</td>
<td align="left">0.0663</td>
<td align="left">0.0342</td>
<td align="left">0.0044</td>
<td align="left">0.9575</td>
</tr>
<tr>
<td rowspan="2" align="left">LSTM (<xref ref-type="bibr" rid="B21">Yu et al., 2019</xref>)</td>
<td align="left">1</td>
<td align="left">0.0588</td>
<td align="left">0.0305</td>
<td align="left">0.0034</td>
<td align="left">0.9638</td>
</tr>
<tr>
<td align="left">2</td>
<td align="left">0.0671</td>
<td align="left">0.0387</td>
<td align="left">0.0045</td>
<td align="left">0.9570</td>
</tr>
<tr>
<td rowspan="2" align="left">BiLSTM (<xref ref-type="bibr" rid="B4">Bou-Rabee et al., 2022</xref>)</td>
<td align="left">1</td>
<td align="left">0.0548</td>
<td align="left">0.0285</td>
<td align="left">0.0030</td>
<td align="left">0.9656</td>
</tr>
<tr>
<td align="left">2</td>
<td align="left">0.0624</td>
<td align="left">0.0352</td>
<td align="left">0.0039</td>
<td align="left">0.9599</td>
</tr>
<tr>
<td rowspan="2" align="left">Proposed BOA-D-BiLSTM</td>
<td align="left">
<bold>1</bold>
</td>
<td align="left">
<bold>0.0026</bold>
</td>
<td align="left">
<bold>0.0016</bold>
</td>
<td align="left">
<bold>6.6852 &#xd7; 10</bold>
<sup>
<bold>&#x2212;06</bold>
</sup>
</td>
<td align="left">
<bold>0.9994</bold>
</td>
</tr>
<tr>
<td align="left">
<bold>2</bold>
</td>
<td align="left">
<bold>0.0030</bold>
</td>
<td align="left">
<bold>0.0018</bold>
</td>
<td align="left">
<bold>8.8628 &#xd7; 10</bold>
<sup>
<bold>&#x2212;06</bold>
</sup>
</td>
<td align="left">
<bold>0.9988</bold>
</td>
</tr>
</tbody>
</table>
<table-wrap-foot>
<fn>
<p>Bold implies the best result.</p>
</fn>
</table-wrap-foot>
</table-wrap>
<p>The suggested BOA-D-BiLSTM model performed better than the baseline models when assessed using the MAE, RMSE, MSE and R<sup>2</sup>. In this research, using the Bayesian optimisation approach, the author determines the best Deep BiLSTM hyperparameter values that lead to greater model precision, as noted in <xref ref-type="fig" rid="F4">Figures 4</xref>&#x2013;<xref ref-type="fig" rid="F9">9</xref>. The combined use of deep BiLSTM and Bayesian optimisation algorithm in the proposed model yields better-reduced error metrics than the benchmark models, as demonstrated by our comparative analysis and experiment results. The persistence model prediction performance does not produce effective results with high evaluation error metrics compared to other prediction models. ARIMA and BPN models compete for dataset 1, where ARIMA performs better than BPN, and for dataset 2, where BPN performs better than ARIMA. RNN performs better than SVR, BPN, ARIMA and persistence.</p>
<fig id="F4" position="float">
<label>FIGURE 4</label>
<caption>
<p>
<bold>(A)</bold> Comparison of the predicted GHI and the actual target GHI for the Chennai region (Dataset 1). <bold>(B)</bold> Comparison of the predicted GHI and the actual target GHI for the Chennai region (Dataset 1) with the Zoomed-in section view. <bold>(C)</bold> Comparison of the predicted GHI and the actual target GHI for the Chennai region (Dataset 1) with the Zoomed-in section view.</p>
</caption>
<graphic xlink:href="fenrg-13-1499751-g004.tif"/>
</fig>
<fig id="F5" position="float">
<label>FIGURE 5</label>
<caption>
<p>
<bold>(A)</bold> Prediction Error vs. Data Points for Dataset 1. <bold>(B)</bold> Prediction Error vs. Data Points for Dataset 1 Zoomed-in section view. <bold>(C)</bold> Prediction Error vs. Data Points for Dataset 1 Zoomed-in section view.</p>
</caption>
<graphic xlink:href="fenrg-13-1499751-g005.tif"/>
</fig>
<fig id="F6" position="float">
<label>FIGURE 6</label>
<caption>
<p>
<bold>(A)</bold> Linear regression plot for the Dataset 1. <bold>(B)</bold> Linear regression plot for the Dataset 1 Zoomed-in section view.</p>
</caption>
<graphic xlink:href="fenrg-13-1499751-g006.tif"/>
</fig>
<fig id="F7" position="float">
<label>FIGURE 7</label>
<caption>
<p>
<bold>(A)</bold> Comparison of predicted GHI and actual target GHI for Jammu region (Dataset 2). <bold>(B)</bold> Comparison of predicted GHI and actual target GHI for Jammu region (Dataset 2) Zoomed-in section view. <bold>(C)</bold> Comparison of predicted GHI and actual target GHI for Jammu region (Dataset 2) Zoomed-in section view.</p>
</caption>
<graphic xlink:href="fenrg-13-1499751-g007.tif"/>
</fig>
<fig id="F8" position="float">
<label>FIGURE 8</label>
<caption>
<p>
<bold>(A)</bold> Prediction Error vs. Data Points for Dataset 2. <bold>(B)</bold> Prediction Error vs. Data Points for Dataset 2 Zoomed-in section view. <bold>(C)</bold> Prediction Error vs. Data Points for Dataset 2 Zoomed-in section view.</p>
</caption>
<graphic xlink:href="fenrg-13-1499751-g008.tif"/>
</fig>
<fig id="F9" position="float">
<label>FIGURE 9</label>
<caption>
<p>
<bold>(A)</bold> Linear regression plot for the Dataset 2. <bold>(B)</bold> Linear regression plot for the Dataset 2 Zoomed-in section view.</p>
</caption>
<graphic xlink:href="fenrg-13-1499751-g009.tif"/>
</fig>
<p>LSTM has lower evaluation error metrics than Boosted Tree, but BiLSTM performs better than LSTM, Boosted Tree, SVR, BPN, ARIMA and persistence regarding reduced error. For linear and stationary time series, ARIMA works well; however, it cannot effectively represent the complex, non-linear patterns encountered in GHI data. Although non-linear relationships can be captured by simple LSTMs, such models frequently rely on human expertise or assistance for hyperparameter adjustment, which is challenging and not an optimal option for large-scale applications. To overcome the drawbacks of simpler models like ARIMA and basic LSTMs, the BOA-D-BiLSTM model incorporates the Bayesian Optimisation Algorithm (BOA). The BOA-D-BiLSTM model&#x2019;s incorporation of BOA ensures optimal configurations with low computing complexity by optimising hyperparameter tweaking. Effectively handling varied and high-dimensional datasets increases scalability in addition to improving forecast accuracy. Regarding predicting, the Deep BiLSTM structure outperforms simpler models by capturing temporal dependencies in both forward and backward directions. The validity and generalisation ability are proved on the two different regions&#x2019; data sets. The proposed model performed well and had improved accuracy for both datasets. BOA-D-BiLSTM for the India region dataset 1 achieves MAE: 0.0016, RMSE: 0.0026, MSE: 6.6852 &#xd7; 10<sup>&#x2212;06</sup> and R<sup>2</sup>: 0.9994; meanwhile, for dataset 2-based evaluations, it achieves MAE: 0.0018, RMSE: 0.0030, MSE: 8.8628 &#xd7; 10<sup>&#x2212;06</sup> and R<sup>2</sup>: 0.9988. 
As a result, <xref ref-type="table" rid="T6">Table 6</xref> demonstrates the feasibility of using a Bayesian optimisation approach to tune the Deep BiLSTM framework by identifying the perfect combination of hyperparameters that greatly enhances the proposed BOA-D-BiLSTM efficacy, leading the predicted values to exactly match the actual target of GHI for the two data sets, as noted in <xref ref-type="fig" rid="F4">Figures 4A&#x2013;C</xref> and <xref ref-type="fig" rid="F7">Figures 7A&#x2013;C</xref>; the zoomed-in view visualises the effectiveness of the proposed model in accurate prediction of GHI in the long-term horizon. Hence, prediction errors are near zero for both datasets 1 and 2, and this is clearly visualised with the help of the prediction error plot and the zoomed-in section view shown in <xref ref-type="fig" rid="F5">Figures 5A, B</xref> and <xref ref-type="fig" rid="F8">Figures 8A, B</xref>. The linear regression plot shows the linear relationship with R &#x3d; 1 for both datasets, as observed in <xref ref-type="fig" rid="F6">Figures 6A, B</xref> and <xref ref-type="fig" rid="F9">Figures 9A, B</xref>, respectively. Compared to the frequently used time series models, namely, the Persistence Model, ARIMA, BPN, RNN, SVR, Boosted Tree, LSTM, and BiLSTM, the proposed hybrid model BOA-D-BiLSTM can affirm the significant elements of the input information and also integrate forward and backward transitions to improve solar irradiance (GHI) prediction and produce smaller errors than the eight baseline models, which is illustrated in <xref ref-type="fig" rid="F10">Figure 10</xref> for better visualisation of the efficacy of the proposed model.</p>
<fig id="F10" position="float">
<label>FIGURE 10</label>
<caption>
<p>The 2D stacked bar graph of the proposed BOA-D-BiLSTM model performance comparison with other baseline prediction models.</p>
</caption>
<graphic xlink:href="fenrg-13-1499751-g010.tif"/>
</fig>
</sec>
</sec>
<sec sec-type="conclusion" id="s6">
<title>6 Conclusion</title>
<p>The intermittent and non-linear character of solar energy and GHI makes integrating PV-produced energy with an electric grid a critical hurdle. Accurate prediction of global horizontal irradiance (GHI) ensures the reliable and efficient operation of photovoltaic systems penetrating the grid. This paper proposed the long-term horizon global horizontal irradiance prediction using Deep Bidirectional Long Short-Term Memory with Bayesian optimisation for automatic hyperparameter fine-tuning and fixation. The Bayesian optimisation algorithm (BOA) was used to fine-tune the BiLSTM hyperparameters to improve prediction accuracy. The performance of the proposed BOA-D-BiLSTM prediction model is compared with the eight baseline prediction models using the evaluation error metrics (MSE, RMSE, MAE, and R<sup>2</sup>) to prove the performance validity. The comparative analysis confirms the suitability and superiority of the proposed BOA-D-BiLSTM prediction model for the long-term GHI prediction application among the other compared prediction models. Further, the potential limitations and future direction are as follows.</p>
<p>Potential Limitation: The limitation of the Bayesian optimisation algorithm is that its run time is quite large when dealing with large amounts of data.</p>
<p>Future direction: The proposed model will be applied to real-time applications in future work. Furthermore, improvements concerning the computational cost and scalability for the real-time application extended using the transformer network, and significant parameters can be explored using the parallel versions of the Bayesian optimisation algorithm for the different horizon-based GHI predictions to speed up the time and overcome the existing potential limitation.</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="s7">
<title>Data availability statement</title>
<p>The data analysed in this study was obtained from the National Oceanic and Atmospheric Administration, restrictions apply to get the datasets access. Requests to access these datasets should be directed to the National Oceanic and Atmospheric Administration official website <ext-link ext-link-type="uri" xlink:href="https://www.noaa.gov/">https://www.noaa.gov/</ext-link>.</p>
</sec>
<sec sec-type="author-contributions" id="s8">
<title>Author contributions</title>
<p>MM: Conceptualization, Data curation, Formal Analysis, Funding acquisition, Investigation, Methodology, Project administration, Resources, Software, Supervision, Validation, Visualization, Writing&#x2013;original draft, Writing&#x2013;review and editing.</p>
</sec>
<sec sec-type="funding-information" id="s9">
<title>Funding</title>
<p>The author(s) declare that no financial support was received for the research, authorship, and/or publication of this article.</p>
</sec>
<ack>
<p>The author expresses his sincere gratitude to NOAA for providing solar irradiance and meteorological data to substantiate and verify the performance and prove its applicability.</p>
</ack>
<sec sec-type="COI-statement" id="s10">
<title>Conflict of interest</title>
<p>The author declares that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="disclaimer" id="s11">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Alali</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Harrou</surname>
<given-names>F.</given-names>
</name>
<name>
<surname>Sun</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>A proficient approach to forecast COVID-19 spread via optimized dynamic machine learning models</article-title>. <source>Sci. Rep.</source> <volume>12</volume> (<issue>1</issue>), <fpage>2467</fpage>. <pub-id pub-id-type="doi">10.1038/s41598-022-06218-3</pub-id>
</citation>
</ref>
<ref id="B2">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Benamrou</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Ouardouz</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Allaouzi</surname>
<given-names>I.</given-names>
</name>
<name>
<surname>Ben Ahmed</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2020</year>). <article-title>A proposed model to forecast hourly global solar irradiation based on satellite derived data, deep learning and machine learning approaches</article-title>. <source>J. Ecol. Eng.</source> <volume>21</volume> (<issue>4</issue>), <fpage>26</fpage>&#x2013;<lpage>38</lpage>. <pub-id pub-id-type="doi">10.12911/22998993/119795</pub-id>
</citation>
</ref>
<ref id="B3">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bergstra</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Bengio</surname>
<given-names>Y.</given-names>
</name>
</person-group> (<year>2012</year>). <article-title>Random search for hyper-parameter optimisation</article-title>. <source>J. Mach. Learn. Res.</source> <volume>13</volume> (<issue>2</issue>).</citation>
</ref>
<ref id="B4">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Bou-Rabee</surname>
<given-names>M. A.</given-names>
</name>
<name>
<surname>Naz</surname>
<given-names>M. Y.</given-names>
</name>
<name>
<surname>Albalaa</surname>
<given-names>I. E.</given-names>
</name>
<name>
<surname>Sulaiman</surname>
<given-names>S. A.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>BiLSTM network-based approach for solar irradiance forecasting in continental climate zones</article-title>. <source>Energies</source> <volume>15</volume> (<issue>6</issue>), <fpage>2226</fpage>. <pub-id pub-id-type="doi">10.3390/en15062226</pub-id>
</citation>
</ref>
<ref id="B5">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Chodakowska</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Nazarko</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Nazarko</surname>
<given-names>&#x141;.</given-names>
</name>
<name>
<surname>Rabayah</surname>
<given-names>H. S.</given-names>
</name>
<name>
<surname>Abendeh</surname>
<given-names>R. M.</given-names>
</name>
<name>
<surname>Alawneh</surname>
<given-names>R.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Arima models in solar radiation forecasting in different geographic locations</article-title>. <source>Energies</source> <volume>16</volume> (<issue>13</issue>), <fpage>5029</fpage>. <pub-id pub-id-type="doi">10.3390/en16135029</pub-id>
</citation>
</ref>
<ref id="B6">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Graves</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Schmidhuber</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2005</year>). &#x201c;<article-title>Framewise phoneme classification with bidirectional LSTM networks</article-title>,&#x201d; in <source>Proceedings. 2005 IEEE international joint conference on neural networks, 2005</source> (<publisher-name>IEEE</publisher-name>), <volume>4</volume>, <fpage>2047</fpage>&#x2013;<lpage>2052</lpage>. <pub-id pub-id-type="doi">10.1109/ijcnn.2005.1556215</pub-id>
</citation>
</ref>
<ref id="B7">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Herrera-Casanova</surname>
<given-names>R.</given-names>
</name>
<name>
<surname>Conde</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Santos-P&#xe9;rez</surname>
<given-names>C.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Hour-ahead photovoltaic power prediction combining BiLSTM and bayesian optimization algorithm, with bootstrap resampling for interval predictions</article-title>. <source>Sensors</source> <volume>24</volume> (<issue>3</issue>), <fpage>882</fpage>. <pub-id pub-id-type="doi">10.3390/s24030882</pub-id>
</citation>
</ref>
<ref id="B8">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Jumin</surname>
<given-names>E.</given-names>
</name>
<name>
<surname>Basaruddin</surname>
<given-names>F. B.</given-names>
</name>
<name>
<surname>Yusoff</surname>
<given-names>Y. B. M.</given-names>
</name>
<name>
<surname>Latif</surname>
<given-names>S. D.</given-names>
</name>
<name>
<surname>Ahmed</surname>
<given-names>A. N.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Solar radiation prediction using boosted decision tree regression model: a case study in Malaysia</article-title>. <source>Environ. Sci. Pollut. Res.</source> <volume>28</volume>, <fpage>26571</fpage>&#x2013;<lpage>26583</lpage>. <pub-id pub-id-type="doi">10.1007/s11356-021-12435-6</pub-id>
</citation>
</ref>
<ref id="B9">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Krishnan</surname>
<given-names>N.</given-names>
</name>
<name>
<surname>Kumar</surname>
<given-names>K. R.</given-names>
</name>
<name>
<surname>R.</surname>
<given-names>S. A.</given-names>
</name>
</person-group> (<year>2024</year>). <article-title>Solar radiation forecasting using gradient boosting based ensemble learning model for various climatic zones</article-title>. <source>Sustain. Energy, Grids Netw.</source> <volume>38</volume>, <fpage>101312</fpage>. <pub-id pub-id-type="doi">10.1016/j.segan.2024.101312</pub-id>
</citation>
</ref>
<ref id="B10">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Li</surname>
<given-names>B.</given-names>
</name>
<name>
<surname>Shao</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Lian</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Li</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Lei</surname>
<given-names>Q.</given-names>
</name>
</person-group> (<year>2023</year>). <article-title>Bayesian optimization-based LSTM for short-term heating load forecasting</article-title>. <source>Energies</source> <volume>16</volume> (<issue>17</issue>), <fpage>6234</fpage>. <pub-id pub-id-type="doi">10.3390/en16176234</pub-id>
</citation>
</ref>
<ref id="B11">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Madhiarasan</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2018</year>). <source>Certain algebraic criteria for design of hybrid neural network models with applications in renewable energy forecasting</source>. <publisher-loc>Chennai, India</publisher-loc>: <publisher-name>Anna University</publisher-name>. <comment>Ph. D. Thesis</comment>.</citation>
</ref>
<ref id="B12">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Madhiarasan</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Louzazni</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Combined long short-term memory network-based short-term prediction of solar irradiance</article-title>. <source>Int. J. Photoenergy</source> <volume>2022</volume> (<issue>1</issue>), <fpage>1004051</fpage>&#x2013;<lpage>1004119</lpage>. <pub-id pub-id-type="doi">10.1155/2022/1004051</pub-id>
</citation>
</ref>
<ref id="B13">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Medina-Santana</surname>
<given-names>A. A.</given-names>
</name>
<name>
<surname>Hewamalage</surname>
<given-names>H.</given-names>
</name>
<name>
<surname>C&#xe1;rdenas-Barr&#xf3;n</surname>
<given-names>L. E.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Deep learning approaches for long-term global horizontal irradiance forecasting for microgrids planning</article-title>. <source>Designs</source> <volume>6</volume> (<issue>5</issue>), <fpage>83</fpage>. <pub-id pub-id-type="doi">10.3390/designs6050083</pub-id>
</citation>
</ref>
<ref id="B14">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Michael</surname>
<given-names>N. E.</given-names>
</name>
<name>
<surname>Hasan</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Al-Durra</surname>
<given-names>A.</given-names>
</name>
<name>
<surname>Mishra</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2022</year>). <article-title>Short-term solar irradiance forecasting based on a novel Bayesian optimized deep Long Short-Term Memory neural network</article-title>. <source>Appl. Energy</source> <volume>324</volume>, <fpage>119727</fpage>. <pub-id pub-id-type="doi">10.1016/j.apenergy.2022.119727</pub-id>
</citation>
</ref>
<ref id="B15">
<citation citation-type="book">
<collab>Ministry of New and Renewable Energy</collab> (<year>2024</year>). <source>Solar overview</source>. <comment>Available at: <ext-link ext-link-type="uri" xlink:href="https://mnre.gov.in/solar/current-status">https://mnre.gov.in/solar/current-status</ext-link> (Accessed September 14, 2024)</comment>.</citation>
</ref>
<ref id="B16">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Mohammadi</surname>
<given-names>K.</given-names>
</name>
<name>
<surname>Shamshirband</surname>
<given-names>S.</given-names>
</name>
<name>
<surname>Anisi</surname>
<given-names>M. H.</given-names>
</name>
<name>
<surname>Alam</surname>
<given-names>K. A.</given-names>
</name>
<name>
<surname>Petkovi&#x107;</surname>
<given-names>D.</given-names>
</name>
</person-group> (<year>2015</year>). <article-title>Support vector regression based prediction of global solar radiation on a horizontal surface</article-title>. <source>Energy Convers. Manag.</source> <volume>91</volume>, <fpage>433</fpage>&#x2013;<lpage>441</lpage>. <pub-id pub-id-type="doi">10.1016/j.enconman.2014.12.015</pub-id>
</citation>
</ref>
<ref id="B17">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Pelikan</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Goldberg</surname>
<given-names>D. E.</given-names>
</name>
<name>
<surname>Cantu-Paz</surname>
<given-names>E.</given-names>
</name>
</person-group> (<year>1999</year>). &#x201c;<article-title>BOA: the Bayesian optimisation algorithm</article-title>,&#x201d; in <source>Proceedings of the genetic and evolutionary computation conference (GECCO-99)</source>, <fpage>525</fpage>&#x2013;<lpage>532</lpage>.</citation>
</ref>
<ref id="B18">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Persson</surname>
<given-names>C.</given-names>
</name>
<name>
<surname>Bacher</surname>
<given-names>P.</given-names>
</name>
<name>
<surname>Shiga</surname>
<given-names>T.</given-names>
</name>
<name>
<surname>Madsen</surname>
<given-names>H.</given-names>
</name>
</person-group> (<year>2017</year>). <article-title>Multi-site solar power forecasting using gradient boosted regression trees</article-title>. <source>Sol. Energy</source> <volume>150</volume>, <fpage>423</fpage>&#x2013;<lpage>436</lpage>. <pub-id pub-id-type="doi">10.1016/j.solener.2017.04.066</pub-id>
</citation>
</ref>
<ref id="B19">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Vijay</surname>
<given-names>M.</given-names>
</name>
<name>
<surname>Saravanan</surname>
<given-names>M.</given-names>
</name>
</person-group> (<year>2022</year>). &#x201c;<article-title>Solar irradiance forecasting using bayesian optimization based machine learning algorithm to determine the optimal size of a residential PV system</article-title>,&#x201d; in <source>2022 international conference on sustainable computing and data communication systems (ICSCDS)</source> (<publisher-loc>IEEE</publisher-loc>), <fpage>744</fpage>&#x2013;<lpage>749</lpage>.</citation>
</ref>
<ref id="B20">
<citation citation-type="book">
<person-group person-group-type="author">
<name>
<surname>Xiang</surname>
<given-names>X.</given-names>
</name>
<name>
<surname>Sun</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Deng</surname>
<given-names>X.</given-names>
</name>
</person-group> (<year>2021</year>). <article-title>Short time solar power forecasting using persistence extreme learning machine approach</article-title>. In <source>E3S web of conferences</source>, <publisher-name>EDP Sciences</publisher-name>, (Vol. <volume>294</volume>, <fpage>01002</fpage>). <pub-id pub-id-type="doi">10.1051/e3sconf/202129401002</pub-id>
</citation>
</ref>
<ref id="B21">
<citation citation-type="journal">
<person-group person-group-type="author">
<name>
<surname>Yu</surname>
<given-names>Y.</given-names>
</name>
<name>
<surname>Cao</surname>
<given-names>J.</given-names>
</name>
<name>
<surname>Zhu</surname>
<given-names>J.</given-names>
</name>
</person-group> (<year>2019</year>). <article-title>An LSTM short-term solar irradiance forecasting under complicated weather conditions</article-title>. <source>IEEE Access</source> <volume>7</volume>, <fpage>145651</fpage>&#x2013;<lpage>145666</lpage>. <pub-id pub-id-type="doi">10.1109/access.2019.2946057</pub-id>
</citation>
</ref>
</ref-list>
</back>
</article>