<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.3 20070202//EN" "journalpublishing.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="2.3" xml:lang="EN">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Artif. Intell.</journal-id>
<journal-title>Frontiers in Artificial Intelligence</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Artif. Intell.</abbrev-journal-title>
<issn pub-type="epub">2624-8212</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/frai.2025.1520592</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Artificial Intelligence</subject>
<subj-group>
<subject>Original Research</subject>
</subj-group>
</subj-group>
</article-categories>
<title-group>
<article-title>Integrating generative adversarial networks with IoT for adaptive AI-powered personalized elderly care in smart homes</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author">
<name><surname>Naseer</surname> <given-names>Fawad</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2883807/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
</contrib>
<contrib contrib-type="author" corresp="yes">
<name><surname>Addas</surname> <given-names>Abdullah</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x002A;</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2901463/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/funding-acquisition/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Tahir</surname> <given-names>Muhammad</given-names></name>
<xref ref-type="aff" rid="aff4"><sup>4</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Khan</surname> <given-names>Muhammad Nasir</given-names></name>
<xref ref-type="aff" rid="aff5"><sup>5</sup></xref>
<uri xlink:href="https://loop.frontiersin.org/people/2365795/overview"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/methodology/"/>
<role content-type="https://credit.niso.org/contributor-roles/visualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-review-editing/"/>
<role content-type="https://credit.niso.org/contributor-roles/conceptualization/"/>
<role content-type="https://credit.niso.org/contributor-roles/project-administration/"/>
<role content-type="https://credit.niso.org/contributor-roles/resources/"/>
<role content-type="https://credit.niso.org/contributor-roles/supervision/"/>
<role content-type="https://credit.niso.org/contributor-roles/validation/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
<contrib contrib-type="author">
<name><surname>Sattar</surname> <given-names>Noreen</given-names></name>
<xref ref-type="aff" rid="aff6"><sup>6</sup></xref>
<role content-type="https://credit.niso.org/contributor-roles/data-curation/"/>
<role content-type="https://credit.niso.org/contributor-roles/formal-analysis/"/>
<role content-type="https://credit.niso.org/contributor-roles/investigation/"/>
<role content-type="https://credit.niso.org/contributor-roles/software/"/>
<role content-type="https://credit.niso.org/contributor-roles/writing-original-draft/"/>
</contrib>
</contrib-group>
<aff id="aff1"><sup>1</sup><institution>Department of Computer Science and Software Engineering, Beaconhouse International College</institution>, <addr-line>Faisalabad</addr-line>, <country>Pakistan</country></aff>
<aff id="aff2"><sup>2</sup><institution>Department of Civil Engineering, College of Engineering, Prince Sattam Bin Abdulaziz University</institution>, <addr-line>Alkharj</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff3"><sup>3</sup><institution>Landscape Architecture Department, Faculty of Architecture and Planning, King Abdulaziz University</institution>, <addr-line>Jeddah</addr-line>, <country>Saudi Arabia</country></aff>
<aff id="aff4"><sup>4</sup><institution>Department of Computer Software Engineering, Sir Syed University of Engineering and Technology</institution>, <addr-line>Karachi</addr-line>, <country>Pakistan</country></aff>
<aff id="aff5"><sup>5</sup><institution>Department of Electrical Engineering, Government College University Lahore</institution>, <addr-line>Lahore</addr-line>, <country>Pakistan</country></aff>
<aff id="aff6"><sup>6</sup><institution>Computer Science Department, University of Agriculture Faisalabad (UAF)</institution>, <addr-line>Faisalabad</addr-line>, <country>Pakistan</country></aff>
<author-notes>
<fn fn-type="edited-by" id="fn0001">
<p>Edited by: Anas Bilal, Hainan Normal University, China</p>
</fn>
<fn fn-type="edited-by" id="fn0002">
<p>Reviewed by: Giovanni Pau, Kore University of Enna, Italy</p>
<p>Mazhar Malik, University of the West of England, United Kingdom</p>
<p>Raheem Sarwar, Manchester Metropolitan University, United Kingdom</p>
<p>Din Bandhu, Manipal Academy of Higher Education, India</p>
</fn>
<corresp id="c001">&#x002A;Correspondence: Abdullah Addas, <email>a.addas@psau.edu.sa</email></corresp>
</author-notes>
<pub-date pub-type="epub">
<day>13</day>
<month>02</month>
<year>2025</year>
</pub-date>
<pub-date pub-type="collection">
<year>2025</year>
</pub-date>
<volume>8</volume>
<elocation-id>1520592</elocation-id>
<history>
<date date-type="received">
<day>31</day>
<month>10</month>
<year>2024</year>
</date>
<date date-type="accepted">
<day>23</day>
<month>01</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x00A9; 2025 Naseer, Addas, Tahir, Khan and Sattar.</copyright-statement>
<copyright-year>2025</copyright-year>
<copyright-holder>Naseer, Addas, Tahir, Khan and Sattar</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/">
<p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (CC BY). The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</p>
</license>
</permissions>
<abstract>
<p>The need for effective and personalized in-home solutions will continue to rise with the world population of elderly individuals expected to surpass 1.6 billion by the year 2050. The study presents a system that merges Generative Adversarial Network (GAN) with IoT-enabled adaptive artificial intelligence (AI) framework for transforming personalized elderly care within the smart home environment. The reason for the application of GANs is to generate synthetic health data, which in turn addresses the scarcity of data, especially of some rare but critical conditions, and helps enhance the predictive accuracy of the system. Continuous data collection from IoT sensors, including wearable sensors (e.g., heart rate monitors, pulse oximeters) and environmental sensors (e.g., temperature, humidity, and gas detectors), enables the system to track vital health indicators, activities, and the environment for early warnings and personalized suggestions through real-time analysis. The AI adapts to the unique health and behavioral patterns in every individual&#x2019;s lifestyle, hence offering personalized prompts and reminders, and sending emergency alert notifications to the caregiver or health provider when required. We demonstrated significant improvements, including 30% faster detection of risk conditions in a large-scale real-world test setup and 25% faster response times compared with other solutions. GANs applied to the synthesis of data enable more robust and accurate predictive models, ensuring privacy with the generation of realistic yet anonymized health profiles. The system merges state-of-the-art AI with GAN technology in advancing elderly care in a proactive, dignified, secure environment that allows improved quality of life and greater independence for the aging individual. 
The work hence provides a novel framework for the utilization of GAN in personalized healthcare and points out that this will help reshape elderly care in IoT-enabled &#x201C;smart&#x201D; homes.</p>
</abstract>
<kwd-group>
<kwd>generative adversarial networks (GANs)</kwd>
<kwd>personalized elderly care</kwd>
<kwd>IoT-enabled smart homes</kwd>
<kwd>adaptive artificial intelligence</kwd>
<kwd>predictive healthcare analytics</kwd>
<kwd>synthetic health data</kwd>
<kwd>proactive health monitoring</kwd>
<kwd>healthcare AI applications</kwd>
</kwd-group>
<contract-num rid="cn1">PSAU/2024/01/99520</contract-num>
<contract-sponsor id="cn1">Prince Sattam bin Abdulaziz University<named-content content-type="fundref-id">10.13039/100009392</named-content></contract-sponsor>
<counts>
<fig-count count="13"/>
<table-count count="13"/>
<equation-count count="22"/>
<ref-count count="44"/>
<page-count count="21"/>
<word-count count="12266"/>
</counts>
<custom-meta-wrap>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value>Medicine and Public Health</meta-value>
</custom-meta>
</custom-meta-wrap>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="sec1">
<label>1</label>
<title>Introduction</title>
<p>The world&#x2019;s elderly population is growing at a phenomenal rate: 1.6 billion people over age 65 are forecasted by 2050. These changes provide a very challenging task for health care systems around the world because the systems must cope with the needs of aging individuals. With it often comes heightened vulnerability to chronic diseases and cognitive impairment, along with a range of other health issues that create a growing demand for innovative care models centered on longevity and quality of life. Recent advancements in AI (<xref ref-type="bibr" rid="ref29">Nayak et al., 2024</xref>) and IoT (<xref ref-type="bibr" rid="ref16">Khan and Naseer, 2020</xref>; <xref ref-type="bibr" rid="ref24">Naseer et al., 2023a</xref>,<xref ref-type="bibr" rid="ref25">b</xref>) have been crucial in attempting to respond to such needs, especially in smart home environments where these technologies are applied to provide personalized in-home care.</p>
<p>The most recent promising AI technologies for biomedical informatics include the GANs (<xref ref-type="bibr" rid="ref8">Dakshit and Prabhakaran, 2024</xref>), mainly in healthcare, to overcome data sparsity and improve predictive power. GANs synthesize data that is quite indistinguishable from real-world data with improved diagnostic models (<xref ref-type="bibr" rid="ref13">Huynh et al., 2024</xref>), reduction of class imbalance, and robust predictive analytics for precision care. In this work, the GANs are implemented for truly adaptive AI-driven systems in IoT-enabled smart home elderly care (<xref ref-type="bibr" rid="ref15">Khan et al., 2024</xref>). By integrating GANs into IoT-enabled systems, this study demonstrates their potential to enhance elderly care by providing scalable, data-driven, and adaptive solutions (<xref ref-type="bibr" rid="ref6">Chen et al., 2022</xref>).</p>
<p>The resulting systems based on AI learn continuously from the individual-specific health indicators and behavior through integration with GANs, observing the improvements in elderly care. IoT-enabled smart homes can offer automatic sensing for vital signs, mobility, medication adherence, and environmental factors-all input to feed an AI system for the purpose of anticipating future health risks and support through personalized interventions. It can give personalized reminders to take medication, prescribe exercises, or send notifications to caregivers in case of abnormal variation in vital signs. We apply GANs in generating anonymized synthetic health data, thereby offering not only higher accuracy in models but also helping solve some key challenges related to privacy concerns in personalized health care.</p>
<p>The entity-relationship diagram (ERD) in <xref ref-type="fig" rid="fig1">Figure 1</xref> for the intelligent GAN based AI system for personalized elderly care in IoT-enabled smart homes illustrates the interactions between various entities. Users, which are elder patients in our context have preferences that are stored in the user preferences entity and receive sensor data from different IoT sensors located in the smart home. Sensors generate sensor data, which the AI model processes to give feedback. This feedback is updated based on the AI model, which is regularly updated with new algorithm types and timestamps. The feedback is then used to improve the user experience and care in the smart home environment.</p>
<fig position="float" id="fig1">
<label>Figure 1</label>
<caption>
<p>Entity-relationship diagram (ERD) of the proposed system: this diagram depicts the interaction between wearable and environmental sensors, AI modules, and GANs. It highlights the data flow for real-time monitoring, synthetic data generation, and secure storage, showcasing the system&#x2019;s design for scalability, adaptability, and privacy compliance in IoT-enabled elderly care.</p>
</caption>
<graphic xlink:href="frai-08-1520592-g001.tif"/>
</fig>
<p>The present research is dedicated to the quality testing of GAN-driven AI systems in managing elderly care processes, such as the early detection of health risk cases and fast response times in emergencies, while paying adequate attention to accessibility, user experience, and privacy. During trials, the system achieved a 30% improvement in early risk detection, reducing time-to-detect by 30%, and lowered emergency response times by 25% compared to conventionally executed processes. By embedding GANs within this adaptive AI framework, we seek to establish a scalable, user-centered model for elderly care that empowers seniors with the ability to live independently with dignity and security.</p>
<p>However, there are a few challenges that stay in the way of the adoption of AI and GAN technologies in the care for the elderly: regulatory compliance, technical accessibility by elderly users, and integration into the existing health framework. This will also require conducting more research in this line to address these challenges and explore GAN&#x2019;s full potential for developing an accurate and diverse data set that will support predictive analytics and personalized interventions in elder care. The current study will add to the increasing interest in AI-driven development of healthcare solutions and hence put forward a blueprint for incorporation of GANs and IoT technologies in the eldercare system.</p>
<p>The study will provide an overview of the literature in Section 2, followed by our research methodology in Section 3. Section 4 discusses our findings, with detailed analysis and discussion in Section 5. Finally, Section 6 concludes with implications and potential directions for future research.</p>
</sec>
<sec id="sec2">
<label>2</label>
<title>Literature review</title>
<p>Advanced practice nursing plays a crucial role in providing senior home care. This model keeps patients safe, avoids hospital visits, and postpones admission to long-term care facilities by using appropriate treatments, accurate assessments, and collaborative approaches. The treatment and quality of life for senior patients are greatly enhanced by this paradigm (<xref ref-type="bibr" rid="ref41">Wicaksono et al., 2023</xref>; <xref ref-type="bibr" rid="ref42">Wojda et al., 2023</xref>). There is a need for policies that support this movement, which indicates a shift towards community-based care and a reduction in institutional capacity. Studies explain how the IoT and AI are transforming traditional elder care models to solve issues like the lack of home health attendants and the rising care needs of an aging population. Others highlight a pilot project that aims to improve patient-centered care in primary settings by concentrating on self-care in the management of chronic pain. The study involves patients, caregivers, doctors, and support workers. This study highlights the importance that medical assistants and IT specialists play in developing patient-centered practices. Authors examine the literature using patient satisfaction as a measure of telehealth&#x2019;s efficacy. It is determined that factors like enhanced communication, reduced travel times, low cost, ease of use, and better outcomes are the key drivers of telehealth satisfaction. These studies emphasize patient-centered care, stakeholder involvement, and telehealth to improve elderly healthcare (<xref ref-type="bibr" rid="ref11">Elder, 2017</xref>; <xref ref-type="bibr" rid="ref30">Patterson et al., 2021</xref>). A comprehensive review of the theory and various applications of Generative Adversarial Networks (GANs) highlights their impact on fields such as image segmentation, medicine, and 3D object generation (<xref ref-type="bibr" rid="ref2">Aggarwal et al., 2021</xref>).</p>
<sec id="sec3">
<label>2.1</label>
<title>IoT in elderly care</title>
<p>Researchers explore the intersection of big data and IoT analytics in biomedical and healthcare technologies, emphasizing the application of machine learning and AI techniques for remote diagnostics and telemedicine. They discuss the adaptability of AI-based telemedicine in advancing healthcare (<xref ref-type="bibr" rid="ref4">Banerjee et al., 2020</xref>; <xref ref-type="bibr" rid="ref17">Kim et al., 2020</xref>). They highlight the transformative impact of AI and IoT technologies in healthcare, introducing innovative diagnostic tools and care strategies. This work underscores IoT&#x2019;s role in healthcare, facilitating remote monitoring, smart sensors, and medication delivery to enhance patient care despite IT management challenges. <xref ref-type="bibr" rid="ref23">Narejo (2020)</xref> and <xref ref-type="bibr" rid="ref36">Sokullu et al. (2020)</xref> focus on IoT systems in smart homes for elderly care, describing how environmental sensors provide context-aware monitoring to ensure safety and provide early warnings for individuals with memory issues. They present a low-cost prototype integrating emergency features for handling critical situations. An IoT-based waste management system is one example that empowers smart home-based systems in this technology-oriented era (<xref ref-type="bibr" rid="ref1">Addas et al., 2024</xref>).</p>
<p>Regarding the growing need for eldercare because of the world&#x2019;s aging population, <xref ref-type="bibr" rid="ref37">Stavropoulos et al. (2020)</xref> draw attention to ambient assisted living technology such as wearables from the IoT and sensors that allow for remote monitoring and assistance. After reviewing case studies, they describe current trends and prospects in effective eldercare technology, grouping solutions by aims, durations of experiments, IoT technologies, and result measurements (<xref ref-type="bibr" rid="ref12">Hassan et al., 2020</xref>). One study presents a Spark streaming framework for real-time health status prediction, with an emphasis on real-time tracking in healthcare, using wearable sensors and mobile apps. They emphasize potential in diabetes treatment. Their method combines streaming machine learning models to achieve high accuracy in health data processing. IoT-based data mining and AI are supported by the healthcare industry to solve issues such as false and incomplete medical records (<xref ref-type="bibr" rid="ref22">Muthu et al., 2020</xref>). They suggest the Generalized Approximate Reasoning-based Intelligence Control (GARIC), which emphasizes tailored health alerts and therapies, for the study of patient data and illness prediction. Other researchers investigate how technology may help reduce senior loneliness amidst the COVID-19 pandemic. They recommend judicious use of technology in crisis communications and care initiatives to help older individuals feel less alone. They emphasize the value of easily available and adaptable treatments for different forms of loneliness and demographic factors (<xref ref-type="bibr" rid="ref7">Conroy et al., 2020</xref>).</p>
<p>Authors discussed the increasing challenges of aging populations, emphasizing the integration of AI and IoT in assisted living and healthcare monitoring for older people. They provide a comprehensive overview, comparing techniques and application scenarios while discussing the benefits and drawbacks of these technologies (<xref ref-type="bibr" rid="ref33">Qian et al., 2021</xref>). Other studies explore the shift towards personalized healthcare systems using AI and machine learning, focusing on disease diagnosis, health monitoring via wearables, and assistive frameworks like social robots. They review current smart healthcare systems, highlighting integration designs critical for intelligent healthcare solutions (<xref ref-type="bibr" rid="ref28">Nasr et al., 2021</xref>).</p>
</sec>
<sec id="sec4">
<label>2.2</label>
<title>AI applications in healthcare</title>
<p>Recent advancements in healthcare management systems (HMS) have been significantly influenced by the convergence of blockchain, IoT, and AI technologies. Individualized approaches have replaced traditional hub-based systems (<xref ref-type="bibr" rid="ref14">Junaid et al., 2022</xref>). However, heterogeneity of devices, disparate IoT designs, and a lack of competitively priced smart sensors are barriers to interoperability and data integration in the deployment. Despite the benefits of telehealth for senior care, privacy issues, especially in this context, provide substantial obstacles that must be addressed with improved privacy safeguards such as informed consent to increase adoption rates. Innovations in AI and IoT are also essential to improving telemedicine, especially in fall detection systems that use cutting-edge algorithms and sensor technology (<xref ref-type="bibr" rid="ref32">Pool et al., 2022</xref>; <xref ref-type="bibr" rid="ref40">Wang et al., 2022</xref>). In addition, the shifts in Chinese society towards elder care underscore the stress placed on caregivers and the necessity of all-encompassing methods to assist both the elderly and their carers (<xref ref-type="bibr" rid="ref3">Ai et al., 2022</xref>).</p>
<p>Recent studies show that edge AI is boosting decision-making and operational efficiency without requiring extensive infrastructure, transforming a variety of industries. Telepresence robots have become essential tools during the COVID-19 pandemic, helping to overcome psychological barriers to human interaction and ease remote connections and healthcare delivery (<xref ref-type="bibr" rid="ref26">Naseer et al., 2023c</xref>,<xref ref-type="bibr" rid="ref27">d</xref>; <xref ref-type="bibr" rid="ref35">Sivabalan and Minu, 2022</xref>). The development of smart healthcare systems is leading to the creation of &#x201C;intelligent hospitals&#x201D; of the future through applications such as non-contact health screenings and intelligent hospital guidance. A critical factor in the digital transformation of healthcare systems is the incorporation of AI, with particular attention paid to data infrastructure, system integration, and real-world applications. Mobile healthcare apps offer practical solutions for medical record-keeping and patient communication, though concerns persist regarding privacy and data security, prompting discussions on regulatory frameworks and risk mitigation strategies (<xref ref-type="bibr" rid="ref10">Ding et al., 2022</xref>; <xref ref-type="bibr" rid="ref18">Kumar et al., 2023</xref>; <xref ref-type="bibr" rid="ref20">Monlezun, 2023</xref>).</p>
<p>Telepresence robotics that involve communication delays contain certain difficulties that are solved using a combined approach that uses deep reinforcement learning methods including double deep Q network (DDQN) and gated recurrent units (GRU) methods that help to control depending on time intervals (<xref ref-type="bibr" rid="ref24">Naseer et al., 2023a</xref>,<xref ref-type="bibr" rid="ref25">b</xref>). The appropriateness of wearable smart sensors in complementing public health projects via suitable technology incorporation has been shown revealing that these sensors are being investigated for disease control and surveillance of vital signs in epidemics. The legal and ethical issues of edge AI in healthcare point out some regulations and the possible ways to apply them responsibly in several domains. Despite challenges in data management and legal systems, the adoption of AI in healthcare leads to enhanced medical services regarding diagnosis and personalized treatment programs (<xref ref-type="bibr" rid="ref9">Das et al., 2023</xref>; <xref ref-type="bibr" rid="ref19">Mohammadzadeh et al., 2020</xref>).</p>
</sec>
<sec id="sec5">
<label>2.3</label>
<title>GANs in healthcare data generation</title>
<p>In recent years, GANs have gained significant attention for their ability to generate realistic data across various applications, including computer vision and natural language processing. <xref ref-type="bibr" rid="ref43">Zhang et al. (2023)</xref> proposed a Robust Generative Adversarial Network (RGAN) that enhances the generalization capabilities of GANs by promoting local robustness within the training sample neighborhood, thereby addressing common issues of instability and poor generation quality associated with traditional GANs. <xref ref-type="bibr" rid="ref5">Cai et al. (2021)</xref> provided a comprehensive survey on GANs, highlighting their applications and the challenges faced in privacy and security contexts, which further emphasizes the versatility and importance of GANs in modern research. Additionally, <xref ref-type="bibr" rid="ref44">Zheng et al. (2019)</xref> introduced the Vehicle Synthesis Generative Adversarial Networks (VS-GANs) framework, which effectively generates annotated vehicle images from remote sensing data, significantly improving vehicle detection performance in high-resolution images. This post-modern technology involving AI, which is increasingly used in the medical and health fields, enables the analysis of biological aspects and health conditions by interaction and correlation (<xref ref-type="bibr" rid="ref34">Shaban-Nejad et al., 2022</xref>). AI analyzes findings using a variety of modalities. Particularly in remote patient monitoring and chronic disease management, wearable health technology has greatly revolutionized healthcare by enabling proactive health management through accurate biometric tracking and real-time monitoring. 
Detailed insight into AI and IoT in processing the large volume of health data has addressed the challenging aspects of data processing and ensured a secure and integrated healthcare infrastructure at a large scale (<xref ref-type="bibr" rid="ref21">Mullankandy et al., 2024</xref>; <xref ref-type="bibr" rid="ref39">Tariq, 2024</xref>). These advancements highlight the transformative potential of AI-driven technologies in enhancing health monitoring, decision-making, and data management in modern healthcare systems.</p>
<p>While previous works have presented discussions on the transformative potential of GANs in healthcare applications, most of them do not discuss some of the key challenges that arise in real-world applications. Additionally, GANs are susceptible to mode collapse&#x2014;where a model generates limited variation in its output data&#x2014;which can undermine the diversity needed for robust healthcare analytics. Another critical issue is privacy in IoT-enabled smart homes, which remains less explored&#x2014;one that concerns the secure transmission and storage of sensitive health data. This shows that, so far, few frameworks are able to meet both the demands and privacy implications of IoT ecosystems while improving GAN performance. In this paper, we address these limitations by integrating robust privacy measures, specifically anonymization and encryption, within a scalable architecture for personalized elderly care.</p>
</sec>
</sec>
<sec sec-type="methods" id="sec6">
<label>3</label>
<title>Methodology</title>
<p>The specific approach for adaptive AI in managing and performing elderly care in smart homes concerning the IoT is composed of various layers that work as a single system to acquire, process, and adapt accordingly to the desired goals and objectives. This innovative system has the clear objective of increasing the security and quality of life for elderly people through IoT sensors and AIs. The introduced adaptive AI system is placed in IoT-based smart homes, where elderly people live, and utilizes both wearable and environmental sensors for efficient care. As illustrated in <xref ref-type="fig" rid="fig2">Figure 2</xref>, the proposed system starts with sensors&#x2019; initialization and data acquisition, where health parameters (for example, pulse, blood pressure), environmental parameters (for example, temperature, humidity), and so on are constantly received and transferred to the main core. The data collected and processed in Internet of Medical Things (IoMT) devices are noisy and, hence, require preprocessing through noise reduction, normalization, and data segmentation before transmitting to the IoMT cloud. In the cloud, Convolutional Neural Networks (CNNs) for image data, and Wavelet Artificial Neural Networks (WANN) for the time-series data identify the health anomalies and environmental threats. Abnormalities that are identified elicit notifications which are relayed through the wearable apparatus to the participants and the caregivers through the short messaging service and e-mail correspondence. Recommendations from participants and caregivers guide the adjustment of the AI models and the system, enabling continuous improvement in performance.</p>
<fig position="float" id="fig2">
<label>Figure 2</label>
<caption>
<p>Proposed system with integrating GANs with IoT for adaptive AI-powered personalized elderly care.</p>
</caption>
<graphic xlink:href="frai-08-1520592-g002.tif"/>
</fig>
<p>CNNs are superior at handling image data, while WANNs handle time series data. Indeed, CNNs would be quite apt for image data given that the architecture extracts spatial features through convolutional layers, which would make them suitable for tasks such as fall detection or environmental hazard recognition. WANNs were chosen for time-series data since they are capable of analyzing nonlinear patterns or transient signals, which are very important for health anomalies detection from wearable sensors. As far as the nature of the data is considered, the RNNs are also feasible in the case of time-series data. However, WANN was chosen over RNN because it guarantees much better computation efficiency as well as superior handling characteristics with respect to real-time high-frequency signals, exactly meeting the demands proposed in this framework. This model selection balances the trade-offs in accuracy, speed, and scalability of the system&#x2014;a must for IoT-enabled healthcare applications. The GAN architecture employed in this study consists of a generator and a discriminator, both designed to handle multidimensional health and environmental data. The generator is a deep neural network with three fully connected layers and Leaky ReLU activation functions to synthesize realistic health data. The discriminator, on the other hand, is a CNN with four layers, utilizing a Sigmoid activation function in the final layer to classify real versus synthetic data.</p>
<p>Training parameters were set as follows: a learning rate of 0.0002 for both networks, a batch size of 64, and an Adam optimizer with beta1&#x202F;=&#x202F;0.5 and beta2&#x202F;=&#x202F;0.999. The networks were trained over 50 epochs with a loss function comprising binary cross-entropy for the discriminator and a mean squared error for the generator. To ensure the stability of training, we incorporated gradient penalty regularization and batch normalization techniques. These configurations were fine-tuned through multiple iterations to balance the generator-discriminator interplay, yielding high-quality synthetic health data while maintaining computational efficiency.</p>
<sec id="sec7">
<label>3.1</label>
<title>GAN integration into the proposed system</title>
<p>GANs are a core component of the proposed system, enhancing its ability to address data scarcity and privacy concerns. The GAN architecture comprises a generator and a discriminator network:<list list-type="order">
<list-item>
<p><bold>Architecture</bold>:</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p><bold>Generator</bold>: A deep neural network with three fully connected layers and Leaky ReLU activation functions, designed to produce synthetic health data that mimics real-world patterns.</p>
</list-item>
<list-item>
<p><bold>Discriminator</bold>: A convolutional neural network (CNN) with four layers and Sigmoid activation, tasked with distinguishing between real and synthetic data.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p><bold>Training Process</bold>:</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p>The GAN is trained using real-world health and environmental data collected from IoT sensors. The generator creates synthetic samples, which are evaluated by the discriminator.</p>
</list-item>
<list-item>
<p>The adversarial process optimizes both networks iteratively, minimizing a combined loss function: binary cross-entropy for the discriminator and mean squared error for the generator.</p>
</list-item>
<list-item>
<p>Training parameters include a learning rate of 0.0002, batch size of 64, and 50 epochs, optimized through hyperparameter tuning.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p><bold>Synthetic Data Generation</bold>:</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p>The GAN generates anonymized health data (e.g., heart rate patterns, activity levels) and environmental data (e.g., temperature fluctuations, gas levels). This data augments the existing dataset, ensuring diversity and filling gaps for rare scenarios.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p><bold>Performance Improvement</bold>:</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p>Compared to traditional oversampling methods, GANs create high-quality, diverse datasets, reducing overfitting and improving the predictive accuracy of AI models. For instance, synthetic data generated by the GAN enhanced fall detection accuracy by 5% and reduced false positives in anomaly detection by 8%, as validated in simulation and field tests.</p>
</list-item>
</list></p>
<p>By leveraging GANs, the system not only addresses data limitations but also enhances privacy and overall performance, making it a robust solution for IoT-enabled elderly care.</p>
<p>The hardware implementation of the embedded system uses microcontrollers such as Arduino for data aggregation, health and environmental sensors for monitoring, and wireless modules such as the Raspberry Pi for transmitting the collected data to central units. Such a setup, as shown in <xref ref-type="fig" rid="fig3">Figure 3</xref>, enables the free flow of data that is vital for real-time care delivery. For validation, great care is taken to ensure data quality, and model training strictly adheres to cross-validation techniques to maximize accuracy and reliability. In summary, the system can be deployed in smart homes to monitor the real-time health condition and safety of elderly patients while promoting independent living at home.</p>
<fig position="float" id="fig3">
<label>Figure 3</label>
<caption>
<p>Embedded hardware components of proposed system.</p>
</caption>
<graphic xlink:href="frai-08-1520592-g003.tif"/>
</fig>
<p>The Perception Layer is the basic layer in this system, including sensors such as wearable devices tracking health indications of one&#x2019;s body&#x2014;for example, pulse and body temperature&#x2014;and environmental sensors tracking the humidity, temperature, and gas levels of one&#x2019;s surroundings. These sensors continuously feed in inputs to form a basis for real-time processing of data and decision-making. Starting the chain is the Perception Layer which includes wearable sensors, environmental sensors, and smart objects. Smartwatches and medical-grade wearables track body metrics like pulse, blood pressure, body temperature, and others. Several parameters of the environment include the temperature, the level of humidity, and the number of certain gases, thus, monitoring the total condition of the elderly person. Cameras and motion detectors the so-called smart devices facilitate how falls are detected and what activities the patient undertakes. Data gathered from sensors is sent to a central node with the help of Bluetooth or Wi-Fi and forms the basis of the Communication Layer. From the central hub, data is then directed to an IoMT cloud for archiving and processing in real-time. This layer makes certain that there is proper transfer of data from sensors to the cloud computing framework. The core of the system belongs to the Processing Layer where the pre-processing of data, application of the AI Model, and generation of insights occurs. Preprocessing of the data entails data cleansing, normalization, and segmentation to make it suitable for the AI algorithms. For health anomaly detection, WANN and for environmental hazards, CNNs are used from the processed data. The Application Layer also involves the management of interactions with the user and the alerts. 
An end-user interface, accessed through mobile apps or web interfaces, allows caregivers and other medical staff to receive real-time updates as well as alarms or notifications whenever an event of concern occurs in the system. Information is passed on through short message service, email, or a voice call, and help is summoned in case of an emergency. Thus, <xref ref-type="sec" rid="sec8">Algorithm 1</xref> describes the implementation of the layers&#x2014;namely Perception, Communication, Processing, and Application&#x2014;which ensures interoperability of the system. Data processing is distributed, with low-latency processing performed at the nodes of the wearable devices and central units, while heavy data processing and long-term data storage are done in the cloud. The tiered architecture further enhances system performance and the scale of elderly care delivery.</p>
<sec id="sec8">
<label>ALGORITHM 1</label>
<title>Proposed algorithm</title>
<p>
<table-wrap position="anchor" id="tab1">
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="middle">Steps</th>
<th align="left" valign="middle">Process description</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="middle">1</td>
<td align="left" valign="middle"><bold>Initialize System</bold></td>
</tr>
<tr>
<td align="left" valign="middle">1.1</td>
<td align="left" valign="middle">Initialize wearable (<italic>SensorData</italic>) and environmental sensors (<italic>EnvironmentalData</italic>)</td>
</tr>
<tr>
<td align="left" valign="middle">1.2</td>
<td align="left" valign="middle">Initialize central hub and cloud infrastructure</td>
</tr>
<tr>
<td align="left" valign="middle">2</td>
<td align="left" valign="middle"><bold>Data Collection</bold></td>
</tr>
<tr>
<td align="left" valign="middle">2.1</td>
<td align="left" valign="middle"><bold>WHILE</bold> system is active<break/><bold>DO</bold></td>
</tr>
<tr>
<td align="left" valign="middle">2.2</td>
<td align="left" valign="middle"><bold>FOR</bold> each wearable sensor<break/><bold>DO</bold></td>
</tr>
<tr>
<td align="left" valign="middle">2.3</td>
<td align="left" valign="middle">Collect health data (<italic>HealthMetrics</italic>) - (heart rate, blood pressure, activity levels)</td>
</tr>
<tr>
<td align="left" valign="middle">2.4</td>
<td align="left" valign="middle">Transmit data to central hub</td>
</tr>
<tr>
<td align="left" valign="middle">2.5</td>
<td align="left" valign="middle"><bold>END FOR</bold></td>
</tr>
<tr>
<td align="left" valign="middle">2.6</td>
<td align="left" valign="middle"><bold>FOR</bold> each environmental sensor<break/><bold>DO</bold></td>
</tr>
<tr>
<td align="left" valign="middle">2.7</td>
<td align="left" valign="middle">Collect environmental data (temperature, humidity, gas levels)</td>
</tr>
<tr>
<td align="left" valign="middle">2.8</td>
<td align="left" valign="middle">Transmit data to central hub</td>
</tr>
<tr>
<td align="left" valign="middle">2.9</td>
<td align="left" valign="middle"><bold>END FOR</bold></td>
</tr>
<tr>
<td align="left" valign="middle">2.10</td>
<td align="left" valign="middle"><bold>END WHILE</bold></td>
</tr>
<tr>
<td align="left" valign="middle">3</td>
<td align="left" valign="middle"><bold>Data Preprocessing</bold></td>
</tr>
<tr>
<td align="left" valign="middle">3.1</td>
<td align="left" valign="middle"><bold>FOR</bold> each data stream received by central hub<break/><bold>DO</bold></td>
</tr>
<tr>
<td align="left" valign="middle">3.2</td>
<td align="left" valign="middle">Apply noise reduction (e.g., low-pass filter for accelerometer and gyroscope data)</td>
</tr>
<tr>
<td align="left" valign="middle">3.3</td>
<td align="left" valign="middle">Normalize data (e.g., min-max scaling)</td>
</tr>
<tr>
<td align="left" valign="middle">3.4</td>
<td align="left" valign="middle">Segment data into fixed-length windows (e.g., 5 seconds)</td>
</tr>
<tr>
<td align="left" valign="middle">3.5</td>
<td align="left" valign="middle"><bold>END FOR</bold></td>
</tr>
<tr>
<td align="left" valign="middle">4</td>
<td align="left" valign="middle"><bold>Data Transmission to Cloud</bold></td>
</tr>
<tr>
<td align="left" valign="middle">4.1</td>
<td align="left" valign="middle">Transmit pre-processed data from central hub to IoMT cloud</td>
</tr>
<tr>
<td align="left" valign="middle">5</td>
<td align="left" valign="middle"><bold>AI Model Analysis</bold></td>
</tr>
<tr>
<td align="left" valign="middle">5.1</td>
<td align="left" valign="middle"><bold>FOR</bold> each data window received in cloud<break/><bold>DO</bold></td>
</tr>
<tr>
<td align="left" valign="middle">5.2</td>
<td align="left" valign="middle"><bold>IF</bold> data is from wearable sensors<break/><bold>THEN</bold></td>
</tr>
<tr>
<td align="left" valign="middle">5.3</td>
<td align="left" valign="middle">Apply WANN to detect health anomalies</td>
</tr>
<tr>
<td align="left" valign="middle">5.4</td>
<td align="left" valign="middle"><bold>ELSE IF</bold> data is from environmental sensors<break/><bold>THEN</bold></td>
</tr>
<tr>
<td align="left" valign="middle">5.5</td>
<td align="left" valign="middle">Apply CNN to detect environmental hazards</td>
</tr>
<tr>
<td align="left" valign="middle">5.6</td>
<td align="left" valign="middle"><bold>END IF</bold></td>
</tr>
<tr>
<td align="left" valign="middle">5.7</td>
<td align="left" valign="middle"><bold>END FOR</bold></td>
</tr>
<tr>
<td align="left" valign="middle">6</td>
<td align="left" valign="middle"><bold>Anomaly Detection and Alert Generation</bold></td>
</tr>
<tr>
<td align="left" valign="middle">6.1</td>
<td align="left" valign="middle"><bold>FOR</bold> each detected anomaly<break/><bold>DO</bold></td>
</tr>
<tr>
<td align="left" valign="middle">6.2</td>
<td align="left" valign="middle">Generate alert with details of the anomaly</td>
</tr>
<tr>
<td align="left" valign="middle">6.3</td>
<td align="left" valign="middle">Transmit alert to central hub</td>
</tr>
<tr>
<td align="left" valign="middle">6.4</td>
<td align="left" valign="middle"><bold>END FOR</bold></td>
</tr>
<tr>
<td align="left" valign="middle">7</td>
<td align="left" valign="middle"><bold>Alert Notification</bold></td>
</tr>
<tr>
<td align="left" valign="middle">7.1</td>
<td align="left" valign="middle"><bold>FOR</bold> each alert received by central hub DO</td>
</tr>
<tr>
<td align="left" valign="middle">7.2</td>
<td align="left" valign="middle">Notify participant via wearable device</td>
</tr>
<tr>
<td align="left" valign="middle">7.3</td>
<td align="left" valign="middle">Notify caregivers via SMS and email</td>
</tr>
<tr>
<td align="left" valign="middle">7.4</td>
<td align="left" valign="middle"><bold>END FOR</bold></td>
</tr>
<tr>
<td align="left" valign="middle">8</td>
<td align="left" valign="middle"><bold>Continuous Improvement</bold></td>
</tr>
<tr>
<td align="left" valign="middle">8.1</td>
<td align="left" valign="middle">Collect feedback from participants and caregivers</td>
</tr>
<tr>
<td align="left" valign="middle">8.2</td>
<td align="left" valign="middle">Update AI models with new data and feedback</td>
</tr>
<tr>
<td align="left" valign="middle">8.3</td>
<td align="left" valign="middle">Retrain models periodically to improve accuracy and reliability</td>
</tr>
<tr>
<td align="left" valign="middle">8.4</td>
<td align="left" valign="middle">Monitor system performance and make necessary adjustments</td>
</tr>
<tr>
<td align="left" valign="middle">9</td>
<td align="left" valign="middle"><bold>ENDING</bold></td>
</tr>
<tr>
<td align="left" valign="middle">9.1</td>
<td align="left" valign="middle">Terminate data collection and alert processes</td>
</tr>
<tr>
<td align="left" valign="middle">9.2</td>
<td align="left" valign="middle">Shut down sensors and central hub</td>
</tr>
</tbody>
</table>
</table-wrap>
</p>
</sec>
</sec>
<sec id="sec9">
<label>3.2</label>
<title>Proposed system explanation</title>
<p>The proposed system will adapt over time through continuous feedback loops and reinforcement learning mechanisms. User-specific data, such as health metrics and behavioral patterns, dynamically fine-tune the system&#x2019;s predictions and interventions. For example, if a user&#x2019;s activity patterns change due to a temporary condition, the system adjusts its alerts and thresholds to minimize false positives. Reinforcement learning enhances this further with the ability to reward the model for good predictions and penalize it for bad ones, hence improving the model&#x2019;s ability to make better decisions over time. Examples of personalized functionality include tailored prompts, such as reminding a user to drink more often when high levels of activity are detected, or adjusting fall detection thresholds based on observed walking stability. Such adaptive interventions ensure that the system remains responsive to individual needs and hence more effective in naturalistic settings.</p>
<p>The proposed system integrates a variety of IoT hardware components to ensure comprehensive monitoring and real-time data processing:<list list-type="order">
<list-item>
<p>
<bold>Wearable Sensors</bold>
</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p><bold>Heart Rate Monitor</bold>: Polar H10 sensors were used for accurate measurement of heart rate and pulse, with a sampling rate of 1&#x202F;Hz.</p>
</list-item>
<list-item>
<p><bold>Accelerometer and Gyroscope</bold>: Integrated into smart wristbands (e.g., Xiaomi Mi Band 6), these sensors track movement patterns and detect potential falls.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p>
<bold>Environmental Sensors</bold>
</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p><bold>Temperature and Humidity Sensors</bold>: DHT22 sensors were deployed for ambient condition monitoring, providing a precision of &#x00B1;0.5&#x00B0;C and&#x202F;&#x00B1;&#x202F;2% for humidity.</p>
</list-item>
<list-item>
<p><bold>Gas Sensors</bold>: MQ-135 sensors were used to detect hazardous gases, including carbon monoxide and methane, with a sensitivity range of 10&#x2013;300&#x202F;ppm.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p>
<bold>Edge Computing Device</bold>
</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p>Raspberry Pi 4 Model B served as the edge computing unit, equipped with 8&#x202F;GB RAM and a quad-core ARM Cortex-A72 processor for local data processing and preliminary analysis.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p>
<bold>Communication Protocols</bold>
</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p>Devices were connected using MQTT for lightweight messaging and Zigbee for low-power communication between sensors and the edge device.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p>
<bold>Cloud Integration</bold>
</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p>AWS IoT Core was employed for cloud storage and remote data analysis, ensuring scalability and secure data exchange.</p>
</list-item>
</list></p>
<sec id="sec10">
<label>3.2.1</label>
<title>Feasibility and limitations</title>
<p>While the hardware components ensure reliable data acquisition and processing, potential limitations include the dependence on stable network connections for cloud functionality and the need for regular maintenance of wearable devices to ensure accuracy. The Shannon-Hartley theorem as in <xref ref-type="disp-formula" rid="EQ1">Equation 1</xref> describes the maximum data rate (C) that can be transmitted over a communication channel with bandwidth (B) and signal-to-noise ratio (SNR):<disp-formula id="EQ1">
<label>(1)</label>
<mml:math id="M1">
<mml:mi>C</mml:mi>
<mml:mo>=</mml:mo>
<mml:mi>B</mml:mi>
<mml:mo>&#x22C5;</mml:mo>
<mml:msub>
<mml:mo>log</mml:mo>
<mml:mn>2</mml:mn>
</mml:msub>
<mml:mfenced open="(" close=")">
<mml:mrow>
<mml:mn>1</mml:mn>
<mml:mo>+</mml:mo>
<mml:mi>S</mml:mi>
<mml:mi>N</mml:mi>
<mml:mi>R</mml:mi>
</mml:mrow>
</mml:mfenced>
</mml:math>
</disp-formula>The overall data stream from multiple sensors can be represented as <xref ref-type="disp-formula" rid="EQ2">Equation 2</xref>:<disp-formula id="EQ2">
<label>(2)</label>
<mml:math id="M2">
<mml:mi>X</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mi>t</mml:mi>
</mml:mfenced>
<mml:mo>=</mml:mo>
<mml:mfenced close="}" open="{">
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mn>1</mml:mn>
</mml:msub>
<mml:mfenced open="(" close=")">
<mml:mi>t</mml:mi>
</mml:mfenced>
<mml:mo>,</mml:mo>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mn>2</mml:mn>
</mml:msub>
<mml:mfenced open="(" close=")">
<mml:mi>t</mml:mi>
</mml:mfenced>
<mml:mo>,</mml:mo>
<mml:mo>&#x2026;</mml:mo>
<mml:mo>,</mml:mo>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>n</mml:mi>
</mml:msub>
<mml:mfenced open="(" close=")">
<mml:mi>t</mml:mi>
</mml:mfenced>
</mml:mrow>
</mml:mfenced>
</mml:math>
</disp-formula>where <inline-formula>
<mml:math id="M3">
<mml:mo stretchy="true">{</mml:mo>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mfenced open="(" close=")">
<mml:mi>t</mml:mi>
</mml:mfenced>
</mml:math>
</inline-formula> is the data from the <inline-formula>
<mml:math id="M4">
<mml:mi>i</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>t</mml:mi>
<mml:mi>h</mml:mi>
</mml:math>
</inline-formula> sensor.</p>
<p>The degree of connectivity of a device <inline-formula><mml:math id="M5a"><mml:mi>v</mml:mi></mml:math></inline-formula> in the network is given by the number of edges connected to it in <xref ref-type="disp-formula" rid="EQ4">Equation 3</xref>:<disp-formula id="EQ4">
<label>(3)</label>
<mml:math id="M5">
<mml:mo>deg</mml:mo>
<mml:mfenced open="(" close=")">
<mml:mi>v</mml:mi>
</mml:mfenced>
<mml:mo>=</mml:mo>
<mml:mo>|{</mml:mo>
<mml:mi>e</mml:mi>
<mml:mo>&#x2208;</mml:mo>
<mml:mi>E</mml:mi>
<mml:mspace width="thickmathspace"/>
<mml:mo stretchy="true">|</mml:mo>
<mml:mspace width="thickmathspace"/>
<mml:mi>v</mml:mi>
<mml:mo>&#x2208;</mml:mo>
<mml:mi>e</mml:mi>
<mml:mo stretchy="true">}</mml:mo>
<mml:mo stretchy="true">|</mml:mo>
</mml:math>
</disp-formula>The latency <inline-formula>
<mml:math id="M6">
<mml:mfenced open="(" close=")">
<mml:mi>L</mml:mi>
</mml:mfenced>
</mml:math>
</inline-formula> and throughput <inline-formula>
<mml:math id="M7">
<mml:mfenced open="(" close=")">
<mml:mi>T</mml:mi>
</mml:mfenced>
</mml:math>
</inline-formula> of an IoT network can be expressed as <xref ref-type="disp-formula" rid="EQ5">Equation 4</xref>:<disp-formula id="EQ5">
<label>(4)</label>
<mml:math id="M8">
<mml:mi>L</mml:mi>
<mml:mo>=</mml:mo>
<mml:munderover>
<mml:mstyle displaystyle="true">
<mml:mo stretchy="true">&#x2211;</mml:mo>
</mml:mstyle>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>n</mml:mi>
</mml:munderover>
<mml:msub>
<mml:mi>d</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:math>
</disp-formula>where <inline-formula>
<mml:math id="M9">
<mml:msub>
<mml:mi>d</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:math>
</inline-formula> is the delay for each transmission hop as described in <xref ref-type="disp-formula" rid="EQ6">Equation 5</xref>:<disp-formula id="EQ6">
<label>(5)</label>
<mml:math id="M10">
<mml:mi>T</mml:mi>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mn>1</mml:mn>
<mml:mi>L</mml:mi>
</mml:mfrac>
</mml:math>
</disp-formula>The most important architecture for CNNs is convolutional layers. Central to convolution is the simple idea of how a set of filters or kernels, which are learnable parameters, can be applied to the input data as expressed in <xref ref-type="disp-formula" rid="EQ7">Equation 6</xref>.<disp-formula id="EQ7">
<label>(6)</label>
<mml:math id="M11">
<mml:mtable>
<mml:mtr>
<mml:mtd>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
<mml:mfenced open="(" close=")" separators=",">
<mml:mi>i</mml:mi>
<mml:mi>j</mml:mi>
</mml:mfenced>
<mml:mo>=</mml:mo>
<mml:mo stretchy="false">(</mml:mo>
<mml:mi>X</mml:mi>
<mml:mo>&#x2217;</mml:mo>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
<mml:mo stretchy="false">)</mml:mo>
<mml:mfenced open="(" close=")" separators=",">
<mml:mi>i</mml:mi>
<mml:mi>j</mml:mi>
</mml:mfenced>
</mml:mtd>
</mml:mtr>
<mml:mtr>
<mml:mtd>
<mml:mo>=</mml:mo>
<mml:munder>
<mml:mstyle displaystyle="true">
<mml:mo stretchy="true">&#x2211;</mml:mo>
</mml:mstyle>
<mml:mi>m</mml:mi>
</mml:munder>
<mml:munder>
<mml:mstyle displaystyle="true">
<mml:mo stretchy="true">&#x2211;</mml:mo>
</mml:mstyle>
<mml:mi>n</mml:mi>
</mml:munder>
<mml:msub>
<mml:mi>X</mml:mi>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>m</mml:mi>
<mml:mo>,</mml:mo>
<mml:mi>j</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>n</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mrow>
<mml:mi>k</mml:mi>
<mml:mfenced open="(" close=")" separators=",">
<mml:mi>m</mml:mi>
<mml:mi>n</mml:mi>
</mml:mfenced>
</mml:mrow>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>b</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
</mml:mtd>
</mml:mtr>
</mml:mtable>
</mml:math>
</disp-formula>Where:</p>
<p><inline-formula>
<mml:math id="M12">
<mml:mi>X</mml:mi>
</mml:math>
</inline-formula> is the input image.</p>
<p><inline-formula>
<mml:math id="M13">
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
</mml:math>
</inline-formula> is the <inline-formula>
<mml:math id="M14">
<mml:mi>k</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>t</mml:mi>
<mml:mi>h</mml:mi>
</mml:math>
</inline-formula> filter.</p>
<p><inline-formula>
<mml:math id="M15">
<mml:msub>
<mml:mi>b</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
</mml:math>
</inline-formula> is the bias term for the <inline-formula>
<mml:math id="M16">
<mml:mi>k</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>t</mml:mi>
<mml:mi>h</mml:mi>
</mml:math>
</inline-formula> filter.</p>
<p><inline-formula>
<mml:math id="M17">
<mml:mo>&#x2217;</mml:mo>
</mml:math>
</inline-formula> denotes the convolution operation.</p>
<p>The most common activation function in CNN is the Rectified Linear Unit (ReLU), which can be expressed as in <xref ref-type="disp-formula" rid="EQ10">Equation 7</xref>:<disp-formula id="EQ10">
<label>(7)</label>
<mml:math id="M18">
<mml:mi mathvariant="italic">ReLU</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mi>x</mml:mi>
</mml:mfenced>
<mml:mo>=</mml:mo>
<mml:mo>max</mml:mo>
<mml:mfenced open="(" close=")" separators=",">
<mml:mn>0</mml:mn>
<mml:mi>x</mml:mi>
</mml:mfenced>
</mml:math>
</disp-formula>Pooling layers reduce the spatial dimensions (width and height) of the feature maps, and this may help reduce computational complexity and control overfitting as shown in <xref ref-type="disp-formula" rid="EQ11">Equation 8</xref>.<disp-formula id="EQ11">
<label>(8)</label>
<mml:math id="M19">
<mml:msubsup>
<mml:mi>f</mml:mi>
<mml:mi>k</mml:mi>
<mml:mi mathvariant="italic">pool</mml:mi>
</mml:msubsup>
<mml:mfenced open="(" close=")" separators=",">
<mml:mi>i</mml:mi>
<mml:mi>j</mml:mi>
</mml:mfenced>
<mml:mo>=</mml:mo>
<mml:munder>
<mml:mo>max</mml:mo>
<mml:mrow>
<mml:mi>m</mml:mi>
<mml:mo>,</mml:mo>
<mml:mi>n</mml:mi>
</mml:mrow>
</mml:munder>
<mml:msub>
<mml:mi>f</mml:mi>
<mml:mi>k</mml:mi>
</mml:msub>
<mml:mfenced open="(" close=")">
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>m</mml:mi>
<mml:mo>,</mml:mo>
<mml:mi>j</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>n</mml:mi>
</mml:mrow>
</mml:mfenced>
</mml:math>
</disp-formula>The output <inline-formula>
<mml:math id="M20">
<mml:mi>y</mml:mi>
</mml:math>
</inline-formula>from a fully connected layer can be expressed as in <xref ref-type="disp-formula" rid="EQ12">Equation 9</xref>:<disp-formula id="EQ12">
<label>(9)</label>
<mml:math id="M21">
<mml:mi>y</mml:mi>
<mml:mo>=</mml:mo>
<mml:mi>f</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mrow>
<mml:mi>W</mml:mi>
<mml:mi>x</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>b</mml:mi>
</mml:mrow>
</mml:mfenced>
</mml:math>
</disp-formula>where <inline-formula>
<mml:math id="M22">
<mml:mi>W</mml:mi>
</mml:math>
</inline-formula> is the weight matrix, <inline-formula>
<mml:math id="M23">
<mml:mi>x</mml:mi>
</mml:math>
</inline-formula> is the input vector, <inline-formula>
<mml:math id="M24">
<mml:mi>b</mml:mi>
</mml:math>
</inline-formula> is the bias vector, and <inline-formula>
<mml:math id="M25">
<mml:mi>f</mml:mi>
</mml:math>
</inline-formula> is the activation function.</p>
<p>The softmax function for an output vector <inline-formula>
<mml:math id="M26">
<mml:mi>z</mml:mi>
</mml:math>
</inline-formula> is defined as in <xref ref-type="disp-formula" rid="EQ13">Equation 10</xref>:<disp-formula id="EQ13">
<label>(10)</label>
<mml:math id="M27">
<mml:mi>&#x03C3;</mml:mi>
<mml:msub>
<mml:mfenced open="(" close=")">
<mml:mi>z</mml:mi>
</mml:mfenced>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:msup>
<mml:mi>e</mml:mi>
<mml:mrow>
<mml:mi>z</mml:mi>
<mml:mi>i</mml:mi>
</mml:mrow>
</mml:msup>
<mml:mrow>
<mml:msub>
<mml:mstyle displaystyle="true">
<mml:mo stretchy="true">&#x2211;</mml:mo>
</mml:mstyle>
<mml:mi>j</mml:mi>
</mml:msub>
<mml:msup>
<mml:mi>e</mml:mi>
<mml:mrow>
<mml:mi>z</mml:mi>
<mml:mi>j</mml:mi>
</mml:mrow>
</mml:msup>
</mml:mrow>
</mml:mfrac>
</mml:math>
</disp-formula>The gradient descent update rule for a weight <inline-formula>
<mml:math id="M28">
<mml:mi>W</mml:mi>
</mml:math>
</inline-formula> is given by <xref ref-type="disp-formula" rid="EQ14">Equation 11</xref>:<disp-formula id="EQ14">
<label>(11)</label>
<mml:math id="M29">
<mml:mi>W</mml:mi>
<mml:mo>&#x2190;</mml:mo>
<mml:mi>W</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>&#x03B7;</mml:mi>
<mml:mfrac>
<mml:mrow>
<mml:mo>&#x2202;</mml:mo>
<mml:mi>L</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mo>&#x2202;</mml:mo>
<mml:mi>W</mml:mi>
</mml:mrow>
</mml:mfrac>
</mml:math>
</disp-formula>where <inline-formula>
<mml:math id="M30">
<mml:mi>&#x03B7;</mml:mi>
</mml:math>
</inline-formula> is the learning rate.</p>
<p>Mathematically, the continuous wavelet transforms (CWT) of a signal <inline-formula>
<mml:math id="M31">
<mml:mi>x</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mi>t</mml:mi>
</mml:mfenced>
</mml:math>
</inline-formula> is given by <xref ref-type="disp-formula" rid="EQ15">Equation 12</xref>:<disp-formula id="EQ15">
<label>(12)</label>
<mml:math id="M32">
<mml:mi>W</mml:mi>
<mml:mfenced open="(" close=")" separators=",">
<mml:mi>a</mml:mi>
<mml:mi>b</mml:mi>
</mml:mfenced>
<mml:mo>=</mml:mo>
<mml:munderover>
<mml:mstyle displaystyle="true">
<mml:mo stretchy="true">&#x222B;</mml:mo>
</mml:mstyle>
<mml:mrow>
<mml:mo>&#x2212;</mml:mo>
<mml:mo>&#x221E;</mml:mo>
</mml:mrow>
<mml:mo>&#x221E;</mml:mo>
</mml:munderover>
<mml:mi>x</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mi>t</mml:mi>
</mml:mfenced>
<mml:mfrac>
<mml:mn>1</mml:mn>
<mml:msqrt>
<mml:mi>a</mml:mi>
</mml:msqrt>
</mml:mfrac>
<mml:mi>&#x03C8;</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mfrac>
<mml:mrow>
<mml:mi>t</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>b</mml:mi>
</mml:mrow>
<mml:mi>a</mml:mi>
</mml:mfrac>
</mml:mfenced>
<mml:mi>d</mml:mi>
<mml:mi>t</mml:mi>
</mml:math>
</disp-formula>where <inline-formula>
<mml:math id="M33">
<mml:mi>a</mml:mi>
</mml:math>
</inline-formula> is the scaling parameter, <inline-formula>
<mml:math id="M34">
<mml:mi>b</mml:mi>
</mml:math>
</inline-formula> is the translation parameter, and <inline-formula>
<mml:math id="M35">
<mml:mi>&#x03C8;</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mi>t</mml:mi>
</mml:mfenced>
</mml:math>
</inline-formula> is the mother wavelet.</p>
<p>Basic processing units that apply a weighted sum of inputs followed by an activation function. For a neuron <inline-formula>
<mml:math id="M36">
<mml:mi>j</mml:mi>
</mml:math>
</inline-formula>, the output <inline-formula>
<mml:math id="M37">
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
</mml:math>
</inline-formula> is expressed in <xref ref-type="disp-formula" rid="EQ17">Equation 13</xref>:<disp-formula id="EQ17">
<label>(13)</label>
<mml:math id="M38">
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mi>f</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mrow>
<mml:munderover>
<mml:mstyle displaystyle="true">
<mml:mo stretchy="true">&#x2211;</mml:mo>
</mml:mstyle>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>n</mml:mi>
</mml:munderover>
<mml:msub>
<mml:mi>w</mml:mi>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mi>j</mml:mi>
</mml:mrow>
</mml:msub>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:msub>
<mml:mi>b</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:math>
</disp-formula>where <inline-formula>
<mml:math id="M39">
<mml:msub>
<mml:mi>w</mml:mi>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mi>j</mml:mi>
</mml:mrow>
</mml:msub>
</mml:math>
</inline-formula> are the weights, <inline-formula><mml:math id="M39a"><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub></mml:math></inline-formula> are the inputs, <inline-formula>
<mml:math id="M40">
<mml:msub>
<mml:mi>b</mml:mi>
<mml:mi>j</mml:mi>
</mml:msub>
</mml:math>
</inline-formula> is the bias, and <inline-formula>
<mml:math id="M41">
<mml:mi>f</mml:mi>
</mml:math>
</inline-formula> is the activation function.</p>
<p>There is a tightly defined algorithm for how the system works, what data the system gathers, how it preprocesses the data, how the AI models analyze the data, how it detects anomalies, how it generates alerts, and how the system improves itself continuously. These procedures ensure a systematic approach to monitoring and response that targets elderly care within smart home requirements.</p>
<p>An ER diagram shows a relationship between system entities such as Participants, Sensor Data, Health Metrics, Environmental Data, Alerts, and Caregivers. These entities and attributes comprise data structures and help in managing and interpreting the flow of data within the system.</p>
<p>Interactions described in the Sequence Diagram explain how the software works in terms of operations from data acquisition, through the formation of an alert and subsequent action. It explains how sensors, the central hub, other parts of the IoMT cloud, and caregivers interact with one another, thus providing a good reference for what is inside the system and how they work together.</p>
<p>Architectural details of the CNN and WANN models describe their configurations and functionalities in the system. CNNs are ideal for image and video data analysis specifically developed for fall detection and activity recognition. In contrast, WANNs study time series data for the prognosis of health abnormalities, and give specific information needed in personalized elderly care. Sensors have to be chosen carefully, and deployed in appropriate manners and the data collected by them has to be transmitted in optimum methods. Vital signs are continuously recorded using wearable sensors, while environmental sensors check for the livability of spaces. Specifically, data collection, communication, and verification steps are fundamental in ensuring the quality and accuracy of data for subsequent analysis.</p>
<p>Some of the data processing steps include denoising, normalization, and segmentation of data which is crucial before feeding the sensor data into an AI model. Cross-validation checks and model training are performed to make AI algorithms more reliable and accurate to detect and capture anomalies and hazards in real-time mode.</p>
<p>A simple low-pass filter can be expressed as in <xref ref-type="disp-formula" rid="EQ18">Equation 14</xref>:<disp-formula id="EQ18">
<label>(14)</label>
<mml:math id="M42">
<mml:mi>y</mml:mi>
<mml:mfenced open="[" close="]">
<mml:mi>n</mml:mi>
</mml:mfenced>
<mml:mo>=</mml:mo>
<mml:mi>&#x03B1;</mml:mi>
<mml:mo>&#x00B7;</mml:mo>
<mml:mi>x</mml:mi>
<mml:mfenced open="[" close="]">
<mml:mi>n</mml:mi>
</mml:mfenced>
<mml:mo>+</mml:mo>
<mml:mfenced open="(" close=")">
<mml:mrow>
<mml:mn>1</mml:mn>
<mml:mo>&#x2212;</mml:mo>
<mml:mi>&#x03B1;</mml:mi>
</mml:mrow>
</mml:mfenced>
<mml:mo>&#x00B7;</mml:mo>
<mml:mi>y</mml:mi>
<mml:mfenced open="[" close="]">
<mml:mrow>
<mml:mi>n</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
</mml:mfenced>
</mml:math>
</disp-formula>Normalization is essential to scale the data from different sensors to a common range, which is expressed as in <xref ref-type="disp-formula" rid="EQ19">Equation 15</xref>:<disp-formula id="EQ19">
<label>(15)</label>
<mml:math id="M43">
<mml:msup>
<mml:mi>x</mml:mi>
<mml:mo>&#x2032;</mml:mo>
</mml:msup>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>x</mml:mi>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mtext>min</mml:mtext>
</mml:msub>
</mml:mrow>
<mml:mrow>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mtext>max</mml:mtext>
</mml:msub>
<mml:mo>&#x2212;</mml:mo>
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mtext>min</mml:mtext>
</mml:msub>
</mml:mrow>
</mml:mfrac>
</mml:math>
</disp-formula>If <inline-formula>
<mml:math id="M44">
<mml:mi>X</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mi>t</mml:mi>
</mml:mfenced>
</mml:math>
</inline-formula> represents the continuous data stream, it can be segmented into windows <inline-formula>
<mml:math id="M45">
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:math>
</inline-formula> as follows in <xref ref-type="disp-formula" rid="EQ20">Equation 16</xref>:<disp-formula id="EQ20">
<label>(16)</label>
<mml:math id="M46">
<mml:msub>
<mml:mi>W</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mfenced close="}" open="{">
<mml:mrow>
<mml:mi>x</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mi>t</mml:mi>
</mml:mfenced>
<mml:mo stretchy="true">|</mml:mo>
<mml:mi mathvariant="normal">t</mml:mi>
<mml:mo>&#x2208;</mml:mo>
<mml:mfenced close="]" open="[">
<mml:mrow>
<mml:msub>
<mml:mi>t</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>,</mml:mo>
<mml:msub>
<mml:mi>t</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mo>+</mml:mo>
<mml:mi>&#x0394;</mml:mi>
<mml:mi>t</mml:mi>
</mml:mrow>
</mml:mfenced>
</mml:mrow>
</mml:mfenced>
</mml:math>
</disp-formula>where <inline-formula>
<mml:math id="M47">
<mml:mi>&#x0394;</mml:mi>
<mml:mi>t</mml:mi>
</mml:math>
</inline-formula> is the window length and <inline-formula>
<mml:math id="M48">
<mml:msub>
<mml:mi>t</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:math>
</inline-formula> is the starting time of the <inline-formula>
<mml:math id="M49">
<mml:mi>i</mml:mi>
<mml:mtext>-th</mml:mtext>
</mml:math>
</inline-formula> window.</p>
<p>The loss function <inline-formula>
<mml:math id="M50">
<mml:mi>L</mml:mi>
</mml:math>
</inline-formula> for a model with parameters <inline-formula>
<mml:math id="M51">
<mml:mi>&#x03B8;</mml:mi>
</mml:math>
</inline-formula> can be defined as in <xref ref-type="disp-formula" rid="EQ22">Equation 17</xref>:<disp-formula id="EQ22">
<label>(17)</label>
<mml:math id="M52">
<mml:mi>L</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mi>&#x03B8;</mml:mi>
</mml:mfenced>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mn>1</mml:mn>
<mml:mi>N</mml:mi>
</mml:mfrac>
<mml:munderover>
<mml:mstyle displaystyle="true">
<mml:mo stretchy="true">&#x2211;</mml:mo>
</mml:mstyle>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>N</mml:mi>
</mml:munderover>
<mml:mi mathvariant="script">L</mml:mi>
<mml:mfenced open="(" close=")">
<mml:mrow>
<mml:mi>f</mml:mi>
<mml:mfenced open="(" close=")" separators=";">
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mi>&#x03B8;</mml:mi>
</mml:mfenced>
<mml:mo>,</mml:mo>
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:mrow>
</mml:mfenced>
</mml:math>
</disp-formula>where <inline-formula>
<mml:math id="M53">
<mml:mi>N</mml:mi>
</mml:math>
</inline-formula> is the number of training samples, <inline-formula>
<mml:math id="M54">
<mml:mi mathvariant="script">L</mml:mi>
</mml:math>
</inline-formula> is the loss function (e.g., mean squared error for regression or cross-entropy for classification), <inline-formula>
<mml:math id="M55">
<mml:mi>f</mml:mi>
<mml:mfenced open="(" close=")" separators=";">
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
<mml:mi>&#x03B8;</mml:mi>
</mml:mfenced>
</mml:math>
</inline-formula> is the model&#x2019;s prediction for input <inline-formula>
<mml:math id="M56">
<mml:msub>
<mml:mi>x</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:math>
</inline-formula>, and <inline-formula>
<mml:math id="M57">
<mml:msub>
<mml:mi>y</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:math>
</inline-formula> is the true label.</p>
<p>Accuracy measures the proportion of correctly classified instances out of the total instances as shown in <xref ref-type="disp-formula" rid="EQ23">Equation 18</xref>.<disp-formula id="EQ23">
<label>(18)</label>
<mml:math id="M58">
<mml:mi mathvariant="italic">Accuracy</mml:mi>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>T</mml:mi>
<mml:mi>N</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>T</mml:mi>
<mml:mi>N</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>F</mml:mi>
<mml:mi>P</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>F</mml:mi>
<mml:mi>N</mml:mi>
</mml:mrow>
</mml:mfrac>
</mml:math>
</disp-formula>where <inline-formula>
<mml:math id="M59">
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
</mml:math>
</inline-formula> is true positives, <inline-formula>
<mml:math id="M60">
<mml:mi>T</mml:mi>
<mml:mi>N</mml:mi>
</mml:math>
</inline-formula> is true negatives, <inline-formula>
<mml:math id="M61">
<mml:mi>F</mml:mi>
<mml:mi>P</mml:mi>
</mml:math>
</inline-formula> is false positives, and <inline-formula>
<mml:math id="M62">
<mml:mi>F</mml:mi>
<mml:mi>N</mml:mi>
</mml:math>
</inline-formula> is false negatives.</p>
<p>Precision in <xref ref-type="disp-formula" rid="EQ24">Equation 19</xref> indicates the proportion of true positive predictions out of all positive predictions.<disp-formula id="EQ24">
<label>(19)</label>
<mml:math id="M63">
<mml:mi mathvariant="italic">Precision</mml:mi>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>F</mml:mi>
<mml:mi>P</mml:mi>
</mml:mrow>
</mml:mfrac>
</mml:math>
</disp-formula>Recall in <xref ref-type="disp-formula" rid="EQ25">Equation 20</xref> measures the proportion of true positives out of all actual positives.<disp-formula id="EQ25">
<label>(20)</label>
<mml:math id="M64">
<mml:mi mathvariant="italic">Recall</mml:mi>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi>T</mml:mi>
<mml:mi>P</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi>F</mml:mi>
<mml:mi>N</mml:mi>
</mml:mrow>
</mml:mfrac>
</mml:math>
</disp-formula>The F1-score is the harmonic mean of precision and recall, as described in <xref ref-type="disp-formula" rid="EQ26">Equation 21</xref>, providing a single metric that balances both.<disp-formula id="EQ26">
<label>(21)</label>
<mml:math id="M65">
<mml:mi>F</mml:mi>
<mml:msub>
<mml:mn>1</mml:mn>
<mml:mi mathvariant="italic">Score</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mn>2</mml:mn>
<mml:mo>&#x00B7;</mml:mo>
<mml:mfrac>
<mml:mrow>
<mml:mi mathvariant="italic">Precision</mml:mi>
<mml:mo>&#x00B7;</mml:mo>
<mml:mi mathvariant="italic">Recall</mml:mi>
</mml:mrow>
<mml:mrow>
<mml:mi mathvariant="italic">Precision</mml:mi>
<mml:mo>+</mml:mo>
<mml:mi mathvariant="italic">Recall</mml:mi>
</mml:mrow>
</mml:mfrac>
</mml:math>
</disp-formula>The average performance across all folds is computed to assess the model&#x2019;s robustness as in <xref ref-type="disp-formula" rid="EQ27">Equation 22</xref>:<disp-formula id="EQ27">
<label>(22)</label>
<mml:math id="M66">
<mml:mi>C</mml:mi>
<mml:msub>
<mml:mi>V</mml:mi>
<mml:mi mathvariant="italic">Score</mml:mi>
</mml:msub>
<mml:mo>=</mml:mo>
<mml:mfrac>
<mml:mn>1</mml:mn>
<mml:mi>k</mml:mi>
</mml:mfrac>
<mml:munderover>
<mml:mstyle displaystyle="true">
<mml:mo stretchy="true">&#x2211;</mml:mo>
</mml:mstyle>
<mml:mrow>
<mml:mi>i</mml:mi>
<mml:mo>=</mml:mo>
<mml:mn>1</mml:mn>
</mml:mrow>
<mml:mi>k</mml:mi>
</mml:munderover>
<mml:mi mathvariant="italic">Scor</mml:mi>
<mml:msub>
<mml:mi>e</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:math>
</disp-formula>where <inline-formula>
<mml:math id="M67">
<mml:mi mathvariant="italic">Scor</mml:mi>
<mml:msub>
<mml:mi>e</mml:mi>
<mml:mi>i</mml:mi>
</mml:msub>
</mml:math>
</inline-formula> is the performance metric for the <inline-formula>
<mml:math id="M68">
<mml:mi>i</mml:mi>
<mml:mtext>-th</mml:mtext>
</mml:math>
</inline-formula> fold.</p>
<p>In hardware implementation microcontrollers, sensors, communication modules, and microprocessor central control hubs must be connected to ensure data flow and data processing occur without a hitch. It proposes this setup that can aid in the real-time monitoring and analysis needed to improve elderly care within IoT smart homes using AI IoT technologies.</p>
<p>The methodology allows for providing the scheme of the adaptive AI approach for using IoT technologies in smart homes for elderly people. Due to the use of up-to-date technology in acquiring data from the sensors, applying the AI model, and adopting of cloud computing services, the system seeks to contribute to the safety and well-being of the elderly and the care they receive with greater efficiency from the caregiving and the medical personnel as well.</p>
</sec>
</sec>
<sec id="sec11">
<label>3.3</label>
<title>Ethical consideration</title>
<p>Different privacy protection methods were developed to address the ethical considerations of the study. The proposed system will apply anonymization techniques that will remove PII and replace it with unique, non-traceable identifiers. Besides this, AES-256 shall be applied during transmission and storage to ensure secure data handling. It also utilizes GANs to synthesize artificial datasets representative of real health data for training models without the use of actual information from users. The RBAC limits access to such data by regulating personnel with only authorized roles, ensuring ethical standards and improving security in this IoT-enabled framework. These measures together address all issues on privacy and ensure ethical handling of health information.</p>
<p>For the research, GANs for data synthesis, CNNs for image analysis, and WANNs for time-series data were chosen after careful consideration against various options. While RNNs and LSTMs are widely used in time-series data for the capabilities they provide in capturing temporal dependencies, WANNs had to be chosen for this problem because of their computational efficiency and the ability to process high-frequency signals that will be critical for real-time health anomaly detection. GANs were adopted for synthesizing data because it generates high-quality, realistic data with the ability to address the class imbalance problem, which is not possible with traditional oversampling techniques. In image analysis, CNNs have been chosen because CNNs are robust in representing spatial features, which in turn provide very good efficiency in detecting falls and ambient conditions. These model selections are therefore in line with the requirements of the system in real-time performance, accuracy, and adaptability over the IoT-enabled framework.</p>
<p>During system development, machine learning frameworks, including TensorFlow and PyTorch, were utilized to implement and train the GANs and other AI models. For data analysis, AI-driven statistical tools such as MATLAB were used to evaluate performance metrics and generate confusion matrices. No generative AI tools have been used in developing or writing this study.</p>
</sec>
</sec>
<sec sec-type="results" id="sec12">
<label>4</label>
<title>Results and discussion</title>
<p>The results report the findings associated with the implementation and testing of the developed adaptive AI system for personalized elderly care in IoT-enabled smart homes. It will also show the system performance using different evaluation metrics, and the important observations and results are analyzed comprehensively. This comparative analysis establishes that the proposed system performs better than the existing solutions in terms of things like response time, which is quite critical to deal with in real-time elderly care applications. The superior performance mostly validates the efficiency of the system and infuses credibility into its potentially robust solution capability for IoT-enabled personalized elderly care.</p>
<p>Another key trade-off that IoT-based adaptive systems face is between the use of real-time data and the energy consumption associated with IoT devices. In principle, while continuous monitoring ensures the timely detection of health anomalies or hazards in the environment, the energy cost can lead to limitations regarding device longevity and scalability in smart home settings. These costs are countered by implementing energy-efficient strategies such as adaptive sampling, which varies the data-collection frequency based on the activity level or detected anomalies. A concrete example is a wearable sensor that lowers its sampling frequency whenever the subject remains at a low activity level, extending battery lifetime while still capturing detailed information during high-risk periods.</p>
<p>With these comes the introduction of edge computing to process most data locally and reduce further communication with the cloud, thus reducing more power consumption. These are just some of the many trade-offs that are involved in ensuring that this system will scale well; the design considerations towards doing intensive monitoring without sacrificing device efficiency and longevity.</p>
<sec id="sec13">
<label>4.1</label>
<title>Simulation results performance metrics</title>
<p>The simulation tests are conducted to test the performance of the new AI model in aspects regarding accuracy, precision, recall, F1-score, false positive rate, and false negative rate. The results are summarized in <xref ref-type="table" rid="tab2">Table 1</xref>.</p>
<table-wrap position="float" id="tab2">
<label>Table 1</label>
<caption>
<p>Simulation results.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Metric</th>
<th align="center" valign="top">CNN (fall detection)</th>
<th align="center" valign="top">WANN (health anomaly detection)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top">95.3%</td>
<td align="center" valign="top">94.8%</td>
</tr>
<tr>
<td align="left" valign="top">Precision</td>
<td align="center" valign="top">93.7%</td>
<td align="center" valign="top">92.5%</td>
</tr>
<tr>
<td align="left" valign="top">Recall</td>
<td align="center" valign="top">91.2%</td>
<td align="center" valign="top">90.4%</td>
</tr>
<tr>
<td align="left" valign="top">F1-score</td>
<td align="center" valign="top">92.4%</td>
<td align="center" valign="top">91.4%</td>
</tr>
<tr>
<td align="left" valign="top">False positive rate</td>
<td align="center" valign="top">2.8%</td>
<td align="center" valign="top">3.1%</td>
</tr>
<tr>
<td align="left" valign="top">False negative rate</td>
<td align="center" valign="top">2.7%</td>
<td align="center" valign="top">3.3%</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>For many weeks, the homes of elderly volunteers were used for field testing to evaluate the system&#x2019;s practical performance. The outcomes are shown in <xref ref-type="table" rid="tab3">Table 2</xref>.</p>
<table-wrap position="float" id="tab3">
<label>Table 2</label>
<caption>
<p>Field test results.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Metric</th>
<th align="center" valign="top">CNN (fall detection)</th>
<th align="center" valign="top">WANN (health anomaly detection)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top">94.1%</td>
<td align="center" valign="top">93.6%</td>
</tr>
<tr>
<td align="left" valign="top">Precision</td>
<td align="center" valign="top">91.8%</td>
<td align="center" valign="top">90.9%</td>
</tr>
<tr>
<td align="left" valign="top">Recall</td>
<td align="center" valign="top">89.5%</td>
<td align="center" valign="top">88.7%</td>
</tr>
<tr>
<td align="left" valign="top">F1-score</td>
<td align="center" valign="top">90.6%</td>
<td align="center" valign="top">89.8%</td>
</tr>
<tr>
<td align="left" valign="top">False positive rate</td>
<td align="center" valign="top">3.2%</td>
<td align="center" valign="top">3.5%</td>
</tr>
<tr>
<td align="left" valign="top">False negative rate</td>
<td align="center" valign="top">3.7%</td>
<td align="center" valign="top">4.1%</td>
</tr>
</tbody>
</table>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th/>
<th align="center" valign="top">Predicted fall</th>
<th align="center" valign="top">Predicted no fall</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Actual fall</td>
<td align="center" valign="top">89</td>
<td align="center" valign="top">1</td>
</tr>
<tr>
<td align="left" valign="top">Actual no fall</td>
<td align="center" valign="top">1</td>
<td align="center" valign="top">109</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="sec14">
<label>4.2</label>
<title>Case studies</title>
<sec id="sec15">
<label>4.2.1</label>
<title>Case study 1: fall detection</title>
<p>The confusion matrix, as illustrated in <xref ref-type="fig" rid="fig4">Figure 4</xref>, provides a comprehensive representation of the model&#x2019;s performance in accurately classifying hazardous and non-hazardous conditions. It documents the number of true positives, true negatives, false positives, and false negatives, thereby enabling a detailed analysis of the model&#x2019;s ability to detect falls effectively.</p>
<fig position="float" id="fig4">
<label>Figure 4</label>
<caption>
<p>Fall detection confusion matrix: illustrating the system&#x2019;s classification performance by detailing the counts of true positives (correctly identified critical events), false positives (incorrectly flagged events), true negatives (correctly identified non-critical cases), and false negatives (missed critical events).</p>
</caption>
<graphic xlink:href="frai-08-1520592-g004.tif"/>
</fig>
<p>As shown in <xref ref-type="table" rid="tab4">Table 3</xref>, the model correctly identified 89 instances of falls and 109 instances of no falls, while misclassifying only 1 instance in each category. In addition to accuracy, precision, recall, and F1-score, sensitivity and specificity metrics were calculated to provide a deeper understanding of the model&#x2019;s performance. Sensitivity measures the model&#x2019;s ability to correctly detect fall events, while specificity evaluates its ability to avoid false alarms by correctly identifying non-fall instances. The performance metrics derived from the confusion matrix, underscore the model&#x2019;s high accuracy and reliability in fall detection. With an accuracy of 99.1%, precision and recall both at 98.9%, and an F1-Score of 98.9%, the model demonstrates exceptional effectiveness.</p>
<table-wrap position="float" id="tab4">
<label>Table 3</label>
<caption>
<p>Fall detection performance metrics.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Metric</th>
<th align="center" valign="top">Value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top">99.1%</td>
</tr>
<tr>
<td align="left" valign="top">Precision</td>
<td align="center" valign="top">98.9%</td>
</tr>
<tr>
<td align="left" valign="top">Recall</td>
<td align="center" valign="top">98.9%</td>
</tr>
<tr>
<td align="left" valign="top">F1-score</td>
<td align="center" valign="top">98.9%</td>
</tr>
<tr>
<td align="left" valign="top">Response time</td>
<td align="center" valign="top">2.5&#x202F;s</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>Additionally, the response time of 2.5&#x202F;s highlights the model&#x2019;s efficiency in real-time fall detection scenarios. The results show that the average response time for Fall Detection events is 2.5&#x202F;s, while the average response time for Alert Triggers is slightly higher at 3.0&#x202F;s.</p>
<p>In addition to testing the system&#x2019;s response time, we examined the distribution of response times for Fall Detection events and Alert Triggers. As illustrated in <xref ref-type="fig" rid="fig5">Figure 5</xref>, the response times were measured and compared for both types of events.</p>
<fig position="float" id="fig5">
<label>Figure 5</label>
<caption>
<p>Comparison of response times for fall detection and alert triggering.</p>
</caption>
<graphic xlink:href="frai-08-1520592-g005.tif"/>
</fig>
<p>To assess the effectiveness of the proposed system, a comparison was made with existing fall detection systems. The performance metrics, as outlined in <xref ref-type="table" rid="tab5">Table 4</xref>, demonstrate that the proposed system significantly outperforms the existing solutions in terms of accuracy, precision, recall, and F1-Score. Additionally, the proposed system has a faster response time.</p>
<table-wrap position="float" id="tab5">
<label>Table 4</label>
<caption>
<p>Comparison with the existing solutions.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Metric</th>
<th align="center" valign="top">Proposed system</th>
<th align="center" valign="top">Existing solutions</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top">99.1%</td>
<td align="center" valign="top">90.1%</td>
</tr>
<tr>
<td align="left" valign="top">Precision</td>
<td align="center" valign="top">98.9%</td>
<td align="center" valign="top">89.9%</td>
</tr>
<tr>
<td align="left" valign="top">Recall</td>
<td align="center" valign="top">98.9%</td>
<td align="center" valign="top">89.9%</td>
</tr>
<tr>
<td align="left" valign="top">F1-score</td>
<td align="center" valign="top">98.9%</td>
<td align="center" valign="top">89.9%</td>
</tr>
<tr>
<td align="left" valign="top">Response time (seconds)</td>
<td align="center" valign="top">2.5&#x202F;s</td>
<td align="center" valign="top">3.00</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="sec16">
<label>4.2.2</label>
<title>Case study 2: health anomaly detection</title>
<p>The confusion matrix provides an essential overview of the model&#x2019;s performance in accurately classifying health anomalies and normal conditions. The model correctly identified 88 instances of anomalies and 108 instances of normal conditions, while misclassifying only 2 instances in each category. This documentation highlights the model&#x2019;s ability to distinguish between hazardous and non-hazardous health conditions effectively.</p>
<p><xref ref-type="fig" rid="fig6">Figure 6</xref> illustrates the confusion matrix for health anomaly detection, providing a visual representation of the model&#x2019;s classification performance. It helps to easily identify the number of true positives, true negatives, false positives, and false negatives, offering a clear understanding of the model&#x2019;s strengths and areas for improvement. Sensitivity and specificity metrics further demonstrate the robustness of the system in identifying health anomalies while minimizing false positives and false negatives. <xref ref-type="table" rid="tab6">Table 5</xref> shows the performance metrics that were obtained from the confusion matrix:</p>
<fig position="float" id="fig6">
<label>Figure 6</label>
<caption>
<p>Health anomaly detection confusion matrix: illustrating the system&#x2019;s classification performance by detailing the counts of true positives (correctly identified critical events), false positives (incorrectly flagged events), true negatives (correctly identified non-critical cases), and false negatives (missed critical events).</p>
</caption>
<graphic xlink:href="frai-08-1520592-g006.tif"/>
</fig>
<table-wrap position="float" id="tab6">
<label>Table 5</label>
<caption>
<p>Health anomaly detection performance matrix.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Metric</th>
<th align="center" valign="top">Value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top">98.0%</td>
</tr>
<tr>
<td align="left" valign="top">Precision</td>
<td align="center" valign="top">97.8%</td>
</tr>
<tr>
<td align="left" valign="top">Recall</td>
<td align="center" valign="top">97.8%</td>
</tr>
<tr>
<td align="left" valign="top">F1-score</td>
<td align="center" valign="top">97.8%</td>
</tr>
<tr>
<td align="left" valign="top">Response time</td>
<td align="center" valign="top">2.8&#x202F;s</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>In addition to testing the system&#x2019;s response time, the distribution of response times for health anomaly events was analyzed, as described in <xref ref-type="table" rid="tab7">Table 6</xref>. The average response time for Anomaly Detection events is 2.8&#x202F;s, while the response time for Alert Triggers is slightly higher at 3.2&#x202F;s.</p>
<table-wrap position="float" id="tab7">
<label>Table 6</label>
<caption>
<p>Response time distribution.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Event</th>
<th align="center" valign="top">Response time (seconds)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Anomaly detection</td>
<td align="center" valign="top">2.8</td>
</tr>
<tr>
<td align="left" valign="top">Alert trigger</td>
<td align="center" valign="top">3.2</td>
</tr>
</tbody>
</table>
</table-wrap>
<p><xref ref-type="fig" rid="fig7">Figure 7</xref> provides a box plot of response times for Anomaly Detection and Alert Triggers, visually depicting the distribution and variability of response times for each event type.</p>
<fig position="float" id="fig7">
<label>Figure 7</label>
<caption>
<p>Box plot of response times for anomaly detection and alert triggering.</p>
</caption>
<graphic xlink:href="frai-08-1520592-g007.tif"/>
</fig>
<p>To assess the effectiveness of the proposed system for better comparison, the proposed system was compared with the existing fall detection system in <xref ref-type="table" rid="tab8">Table 7</xref>.</p>
<table-wrap position="float" id="tab8">
<label>Table 7</label>
<caption>
<p>Comparison with the existing solutions.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Metric</th>
<th align="center" valign="top">Proposed system</th>
<th align="center" valign="top">Existing solutions</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top">98.0%</td>
<td align="center" valign="top">90.0%</td>
</tr>
<tr>
<td align="left" valign="top">Precision</td>
<td align="center" valign="top">97.8%</td>
<td align="center" valign="top">89.0%</td>
</tr>
<tr>
<td align="left" valign="top">Recall</td>
<td align="center" valign="top">97.8%</td>
<td align="center" valign="top">88.0%</td>
</tr>
<tr>
<td align="left" valign="top">F1-score</td>
<td align="center" valign="top">97.8%</td>
<td align="center" valign="top">88.5%</td>
</tr>
<tr>
<td align="left" valign="top">Response time (seconds)</td>
<td align="center" valign="top">2.8</td>
<td align="center" valign="top">6.0</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="sec17">
<label>4.2.3</label>
<title>Case study 3: environmental monitoring</title>
<p>Besides gas leakage, the system also monitored temperature, humidity, and dangerous gases such as carbon monoxide and methane. These factors were selected in consideration of their important implications on elderly health and safety. For instance, abnormal temperature levels can exacerbate chronic disease conditions while high levels of humidity might precipitate respiratory diseases. Pernicious gases pose certain risks which include poisoning and building explosion. Integration of these environmental parameters enables this system to ensure comprehensive monitoring for the detection and addressing of the safety concerns that exist in the real world upfront for enhancing overall livability and security for IoT-enabled smart houses looking after elderly individuals. The confusion matrix enables documenting various aspects of the model&#x2019;s ability or inability to correctly classify hazardous and non-hazardous conditions accurately as shown in <xref ref-type="fig" rid="fig8">Figure 8</xref>.</p>
<fig position="float" id="fig8">
<label>Figure 8</label>
<caption>
<p>Environmental monitoring confusion matrix: illustrating the system&#x2019;s classification performance by detailing the counts of true positives (correctly identified critical events), false positives (incorrectly flagged events), true negatives (correctly identified non-critical cases), and false negatives (missed critical events).</p>
</caption>
<graphic xlink:href="frai-08-1520592-g008.tif"/>
</fig>
<p>The following are the performance metrics that were obtained from the confusion matrix in <xref ref-type="table" rid="tab9">Table 8</xref>.</p>
<table-wrap position="float" id="tab9">
<label>Table 8</label>
<caption>
<p>Environmental monitoring performance matrix.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Metric</th>
<th align="center" valign="top">Value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top">95.8%</td>
</tr>
<tr>
<td align="left" valign="top">Precision</td>
<td align="center" valign="top">93.8%</td>
</tr>
<tr>
<td align="left" valign="top">Recall</td>
<td align="center" valign="top">90.0%</td>
</tr>
<tr>
<td align="left" valign="top">F1-score</td>
<td align="center" valign="top">91.8%</td>
</tr>
<tr>
<td align="left" valign="top">Response time</td>
<td align="center" valign="top">2.0&#x202F;s</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>In addition to testing the system&#x2019;s response time, the distribution of response times for Environmental Monitoring events was analyzed, as described in <xref ref-type="table" rid="tab10">Table 9</xref>. The average response time for Hazard Detection events is 2.0&#x202F;s, while the response time for Alert Triggers is 2.5&#x202F;s.</p>
<table-wrap position="float" id="tab10">
<label>Table 9</label>
<caption>
<p>Response time distribution.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Event</th>
<th align="center" valign="top">Response time (seconds)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Hazard detection</td>
<td align="center" valign="top">2.0</td>
</tr>
<tr>
<td align="left" valign="top">Alert trigger</td>
<td align="center" valign="top">2.5</td>
</tr>
</tbody>
</table>
</table-wrap>
<p><xref ref-type="fig" rid="fig9">Figure 9</xref> illustrates the response time distribution for both Hazard Detection and Alert Triggers, providing a clear visualization of how response times vary for these events.</p>
<fig position="float" id="fig9">
<label>Figure 9</label>
<caption>
<p>Response time distribution for both hazard detection and alert triggers.</p>
</caption>
<graphic xlink:href="frai-08-1520592-g009.tif"/>
</fig>
<p>To evaluate the effectiveness of the proposed system, a comparison was made with existing fall detection systems. As shown in <xref ref-type="table" rid="tab11">Table 10</xref>, the proposed system significantly outperforms existing solutions in terms of accuracy, precision, recall, and F1-Score, while also offering a faster response time.</p>
<table-wrap position="float" id="tab11">
<label>Table 10</label>
<caption>
<p>A comparison with the existing solutions.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Metric</th>
<th align="center" valign="top">Proposed system</th>
<th align="center" valign="top">Existing solutions</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Accuracy</td>
<td align="center" valign="top">95.8%</td>
<td align="center" valign="top">87.1%</td>
</tr>
<tr>
<td align="left" valign="top">Precision</td>
<td align="center" valign="top">93.8%</td>
<td align="center" valign="top">85.3%</td>
</tr>
<tr>
<td align="left" valign="top">Recall</td>
<td align="center" valign="top">90.0%</td>
<td align="center" valign="top">81.8%</td>
</tr>
<tr>
<td align="left" valign="top">F1-score</td>
<td align="center" valign="top">91.8%</td>
<td align="center" valign="top">83.5%</td>
</tr>
<tr>
<td align="left" valign="top">Sensitivity</td>
<td align="center" valign="top">99.0%</td>
<td align="center" valign="top">89.0%</td>
</tr>
<tr>
<td align="left" valign="top">Specificity</td>
<td align="center" valign="top">98.8%</td>
<td align="center" valign="top">87.2%</td>
</tr>
<tr>
<td align="left" valign="top">Response time (seconds)</td>
<td align="center" valign="top">2.0</td>
<td align="center" valign="top">2.6</td>
</tr>
</tbody>
</table>
</table-wrap>
<p><xref ref-type="fig" rid="fig10">Figure 10</xref> provides a visual comparative analysis, illustrating the superior performance of the proposed system across all evaluated metrics.</p>
<fig position="float" id="fig10">
<label>Figure 10</label>
<caption>
<p>Comparison with the existing solutions.</p>
</caption>
<graphic xlink:href="frai-08-1520592-g010.tif"/>
</fig>
<p>To test the system&#x2019;s effectiveness, a gas leak scenario was simulated during the study period. The recorded gas levels over 35&#x202F;min of monitoring are shown in <xref ref-type="table" rid="tab12">Table 11</xref>, reflecting the changes in gas concentration during the event. <xref ref-type="fig" rid="fig11">Figure 11</xref> illustrates the changes in gas levels during the incident, demonstrating a peak at 3:15&#x202F;PM before gradually decreasing.</p>
<table-wrap position="float" id="tab12">
<label>Table 11</label>
<caption>
<p>Gas level changes during the event.</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Time</th>
<th align="center" valign="top">Gas level (ppm)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">3:00&#x202F;PM</td>
<td align="center" valign="top">20</td>
</tr>
<tr>
<td align="left" valign="top">3:10&#x202F;PM</td>
<td align="center" valign="top">30</td>
</tr>
<tr>
<td align="left" valign="top">3:15&#x202F;PM</td>
<td align="center" valign="top">150</td>
</tr>
<tr>
<td align="left" valign="top">3:20&#x202F;PM</td>
<td align="center" valign="top">80</td>
</tr>
<tr>
<td align="left" valign="top">3:30&#x202F;PM</td>
<td align="center" valign="top">40</td>
</tr>
<tr>
<td align="left" valign="top">3:35&#x202F;PM</td>
<td align="center" valign="top">20</td>
</tr>
</tbody>
</table>
</table-wrap>
<fig position="float" id="fig11">
<label>Figure 11</label>
<caption>
<p>Gas levels during the event.</p>
</caption>
<graphic xlink:href="frai-08-1520592-g011.tif"/>
</fig>
<p>The system detected the gas leak at 3:15&#x202F;PM when the gas level spiked to 150&#x202F;ppm. An alert was raised, and appropriate measures were taken to control the hazard, and the gas level normalized within 20&#x202F;min.</p>
</sec>
</sec>
<sec id="sec18">
<label>4.3</label>
<title>Overall system performance</title>
<p>The overall system performance metrics of the IoT-enabled smart home for personalized elderly care demonstrate high effectiveness and reliability, as shown in the attached chart. The system achieved an accuracy of 98.0%, indicating a high rate of correct identifications for both true positives and true negatives. With a precision of 95.0% and a recall of 94.0%, the system effectively minimizes false alarms while ensuring that critical events are detected. The F1-score of 94.5% balances precision and recall, reflecting comprehensive performance. Additionally, the response time of 2.4&#x202F;s underscores the system&#x2019;s capability to generate timely alerts, crucial for immediate interventions in emergencies. These metrics highlight the system&#x2019;s potential to provide robust continuous monitoring and prompt response, enhancing the safety and well-being of elderly individuals through the integration of IoT and AI technologies.</p>
<p>The subsequent decline highlights areas needing improvement, particularly in maintaining consistent performance over time. Despite this, the system maintained high overall metrics, demonstrating its effectiveness in providing reliable and continuous monitoring for elderly care. Further efforts will focus on addressing the causes of performance degradation to ensure sustained high uptime and reliability. <xref ref-type="table" rid="tab13">Table 12</xref> provides a comprehensive comparison of the proposed system&#x2019;s performance metrics with existing solutions.</p>
<table-wrap position="float" id="tab13">
<label>Table 12</label>
<caption>
<p>Comparative performance of the proposed GAN-based AI system with existing IoT-enabled healthcare solutions: we-care (<xref ref-type="bibr" rid="ref31">Pinto et al., 2017</xref>) and an enhanced random forest-based fall detection system (<xref ref-type="bibr" rid="ref38">Subburam et al., 2024</xref>).</p>
</caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th align="left" valign="top">Metric</th>
<th align="center" valign="top">Proposed system</th>
<th align="center" valign="top">System A (we-care)</th>
<th align="center" valign="top">System B (enhanced random forest)</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left" valign="top">Accuracy (%)</td>
<td align="center" valign="top">98.0</td>
<td align="center" valign="top">91.0</td>
<td align="center" valign="top">91.0</td>
</tr>
<tr>
<td align="left" valign="top">Precision (%)</td>
<td align="center" valign="top">95.0</td>
<td align="center" valign="top">Not specified</td>
<td align="center" valign="top">92.0</td>
</tr>
<tr>
<td align="left" valign="top">Recall (%)</td>
<td align="center" valign="top">94.0</td>
<td align="center" valign="top">Not specified</td>
<td align="center" valign="top">92.0</td>
</tr>
<tr>
<td align="left" valign="top">F1-score (%)</td>
<td align="center" valign="top">94.5</td>
<td align="center" valign="top">Not specified</td>
<td align="center" valign="top">92.0</td>
</tr>
<tr>
<td align="left" valign="top">Response time (seconds)</td>
<td align="center" valign="top">2.4</td>
<td align="center" valign="top">Not specified</td>
<td align="center" valign="top">Not specified</td>
</tr>
<tr>
<td align="left" valign="top">Sensitivity (%)</td>
<td align="center" valign="top">97.5</td>
<td align="center" valign="top">Not specified</td>
<td align="center" valign="top">Not specified</td>
</tr>
<tr>
<td align="left" valign="top">Specificity (%)</td>
<td align="center" valign="top">96.8</td>
<td align="center" valign="top">Not specified</td>
<td align="center" valign="top">Not specified</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="sec19">
<label>4.4</label>
<title>Real-world validation and performance under varying conditions</title>
<p>Extensive field tests were conducted to evaluate the system&#x2019;s performance in diverse real-world scenarios. The tests involved 50 elderly users in smart home environments over a 6-month period, capturing data across varying environmental settings and user behaviors. The following key results were observed:<list list-type="order">
<list-item>
<p><bold>Environmental Variations</bold>:</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p><bold>High-Noise Settings</bold>: In environments with significant background noise (e.g., television or conversations), the system&#x2019;s fall detection accuracy decreased from 98.0 to 94.1%.</p>
</list-item>
<list-item>
<p><bold>Temperature Fluctuations</bold>: Inconsistent temperature and humidity levels resulted in a slight decline in health anomaly detection sensitivity, from 97.5 to 94.8%.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p><bold>User Behavior</bold>:</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p><bold>Erratic Movements</bold>: Users with irregular movement patterns (e.g., pacing) introduced variability in false-positive rates for fall detection, increasing by 2%.</p>
</list-item>
<list-item>
<p><bold>Sensor Placement</bold>: Improperly worn wearable devices (e.g., loose wristbands) reduced the precision of vital sign monitoring by 3%.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p><bold>Adaptation and Scalability</bold>:</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p>The system demonstrated adaptability by recalibrating thresholds based on feedback, mitigating performance drops over time.</p>
</list-item>
<list-item>
<p>Edge AI deployment reduced response times by 15% compared to initial cloud-dependent configurations.</p>
</list-item>
</list></p>
<p>These findings highlight the robustness of the proposed system, while also identifying areas for improvement, such as advanced noise filtering techniques and improved user training for optimal sensor placement. By validating performance under real-world conditions, this study ensures the practical applicability and scalability of the system in diverse scenarios. Feedback from 50 elderly individuals and 20 caregivers over a six-month field test highlighted the system&#x2019;s usability and practical benefits. Among elderly users, 85% found the system easy to use, 90% reported an increased sense of security, and 80% felt it supported their independence. Caregivers noted a 75% reduction in workload due to timely alerts and actionable insights, with 88% expressing confidence in the system&#x2019;s reliability. Challenges included occasional difficulties in attaching wearable devices and the need for more customizable alert thresholds. Overall, the feedback underscores the system&#x2019;s usability, effectiveness, and areas for refinement to enhance user experience.</p>
</sec>
</sec>
<sec sec-type="discussion" id="sec20">
<label>5</label>
<title>Discussion</title>
<p>The findings obtained from the simulation and field-testing studies show that the CNN and WANN models produce high levels of accuracy, precision, recall, and F1 score. The comparatively low false-positive and false-negative rates reflect highly accurate classification of health anomalies and falls.</p>
<sec id="sec21">
<label>5.1</label>
<title>Simulation vs. field test performance (accuracy)</title>
<p>The proposed system exhibits practical robustness, as demonstrated by comparing the simulation and field performance. While simulation results showed slightly higher metrics due to their controlled conditions, field tests revealed that the system is able to adapt to real-world scenarios. For instance, the fall detection system achieved a high accuracy of 94.1% and a recall of 89.5% in field conditions, hence proving its efficiency in picking up true events even under changing environmental conditions. Similarly, health anomaly detection and environmental monitoring were able to recognize critical health risks and hazards with reliable performances of 93.6 and 95.8%, respectively. These results underpin the practical applicability of the system to offer timely and accurate intervention in elderly care in IoT-enabled smart homes. As can be seen in <xref ref-type="fig" rid="fig12">Figure 12A</xref>, the scores of the system during actual field testing are slightly lower than the simulation outcomes, but the system still boasts relatively high accuracy and reliability in the real environment. This drop in the performance parameters is expected because the performance of an algorithm or a model is bound to differ in real-life conditions, which are highly stochastic and liable to fluctuations.</p>
<fig position="float" id="fig12">
<label>Figure 12</label>
<caption>
<p>Overall system performance <bold>(A)</bold> simulation vs. field test performance (accuracy), <bold>(B)</bold> fall detection system (precision), <bold>(C)</bold> health anomaly detection system (recall), <bold>(D)</bold> environmental monitoring system (F1-score), <bold>(E)</bold> comparative analysis (response time).</p>
</caption>
<graphic xlink:href="frai-08-1520592-g012.tif"/>
</fig>
</sec>
<sec id="sec22">
<label>5.2</label>
<title>Fall detection system (precision)</title>
<p>The fall detection system had the best performance, achieving an accuracy of 99.1%, a precision of 98.9%, a recall of 98.9%, and an F1-score of 98.9%, as shown in <xref ref-type="fig" rid="fig12">Figure 12B</xref>. This is further supported by the response time of 2.5&#x202F;s, which allows for early alert creation, thus meeting the goal of raising the awareness of caregivers as soon as possible.</p>
</sec>
<sec id="sec23">
<label>5.3</label>
<title>Health anomaly detection system (recall)</title>
<p>The health anomaly detection system also showed satisfactory results, achieving an accuracy of 98.0%, a precision of 97.8%, a recall of 97.8%, and an F1-score of 97.8%. As illustrated in <xref ref-type="fig" rid="fig12">Figure 12C</xref>, the <inline-formula>
<mml:math id="M69">
<mml:mi>&#x0394;</mml:mi>
<mml:mi>P</mml:mi>
</mml:math>
</inline-formula> of the new design is smaller than that of the original design. The response time of about 2.8&#x202F;s is ideal because it facilitates timely alerts in the process of caregiving.</p>
</sec>
<sec id="sec24">
<label>5.4</label>
<title>Environmental monitoring system (F1-score)</title>
<p>Regarding the specific goals, the environmental hazard detection system achieved an accuracy of 95.8%, a precision of 93.8%, a recall of 90.0%, and an F1-score of 91.8% within the minimum response time of 2.0&#x202F;s. These metrics illustrate the system&#x2019;s capacity to quickly identify adverse states and notify the caregivers, as shown in <xref ref-type="fig" rid="fig12">Figure 12D</xref>.</p>
</sec>
<sec id="sec25">
<label>5.5</label>
<title>Comparative analysis (response time)</title>
<p>The comparative analysis with the existing solutions reveals a&#x202F;+&#x202F;52% increase in accuracy and precision, a&#x202F;+&#x202F;62% increase in recall, and improvements to the response time. The proposed system yielded higher results than the pre-existing systems in all case studies, corroborating its capability to give correct and timely alerts, which is noteworthy for elderly care, as shown in <xref ref-type="fig" rid="fig12">Figure 12E</xref>.</p>
</sec>
<sec id="sec26">
<label>5.6</label>
<title>Overall system performance</title>
<p>The absolute performance parameters of the system can be marked as relatively high &#x2013; an accuracy of 98.0%, a precision of 95.0%, a recall of 94.0%, and an F1-score of 94.5%, with the tested response time being 2.4&#x202F;s, which enhances the system&#x2019;s ability to monitor the situation continuously and give a quick response. The availability and reliability measures of the system that have been discussed over a period of 3&#x202F;months give an account of how sound and strong the system is and what portions require improvements, except for a trivial dip as shown in <xref ref-type="fig" rid="fig13">Figure 13</xref>.</p>
<fig position="float" id="fig13">
<label>Figure 13</label>
<caption>
<p>Performance of system uptime and reliability metrics.</p>
</caption>
<graphic xlink:href="frai-08-1520592-g013.tif"/>
</fig>
<p>Various real-world scenarios were put to the test, such as varying sensor data quality and levels of environmental noise. The fall detection system, for example, performed well in high-quality sensor data scenarios with an accuracy of 99.1%, while under conditions of low-quality or noisy sensor data, the accuracy slightly dropped to 94.3%. Similarly, in health anomaly detection, the system showed a consistent recall of 97.8% under normal conditions but a slight fluctuation of &#x00B1;2% under highly noisy environments. In environmental monitoring, the model showed a decrease in specificity from 98.8 to 95.2% under fluctuating humidity or high background interference conditions. These findings underline the strength of the model in terms of adaptability and robustness, hence giving good insight into its reliability in various real-world scenarios.</p>
</sec>
</sec>
<sec id="sec27">
<label>6</label>
<title>System scalability</title>
<p>The proposed system is designed to handle an increasing number of users and sensors by leveraging modular architecture and scalable cloud-based infrastructure. Data from wearable and environmental sensors are processed locally through edge computing to minimize latency, while cloud integration enables centralized data aggregation and model updates.<list list-type="order">
<list-item>
<p>
<bold>Handling an Increasing Number of Users</bold>
</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p>The system employs a distributed architecture, where each user&#x2019;s data is processed individually at the edge before being integrated into the cloud. This ensures that the addition of new users does not overburden centralized processing units.</p>
</list-item>
<list-item>
<p>Adaptive data sampling reduces the frequency of data collection during low-activity periods, conserving bandwidth and computational resources.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p>
<bold>Scaling Sensor Networks</bold>
</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p>The system supports dynamic sensor addition by utilizing standardized communication protocols (e.g., MQTT, Zigbee). New sensors can be integrated seamlessly without disrupting existing operations.</p>
</list-item>
<list-item>
<p>Anomaly detection models are periodically retrained using synthetic data generated by GANs, ensuring robustness even as sensor networks grow in complexity.</p>
</list-item>
</list><list list-type="order">
<list-item>
<p>
<bold>Potential Challenges and Mitigation</bold>
</p>
</list-item>
</list><list list-type="bullet">
<list-item>
<p><bold>Data Overload</bold>: An exponential increase in data could lead to bottlenecks. This is mitigated by prioritizing critical data streams and implementing real-time filtering at the edge.</p>
</list-item>
<list-item>
<p><bold>Interoperability Issues</bold>: Different sensor types and communication standards may hinder integration. To address this, the system employs a middleware layer that translates data formats into a unified standard.</p>
</list-item>
<list-item>
<p><bold>Energy Consumption</bold>: Expanding sensor networks increases energy demands. Energy-efficient designs, such as adaptive sampling and low-power communication protocols, are incorporated to maintain sustainability.</p>
</list-item>
</list></p>
</sec>
<sec sec-type="conclusions" id="sec28">
<label>7</label>
<title>Conclusion</title>
<p>The study focuses on creating an enhanced AI model that will assist elderly citizens experiencing challenges in smart homes that are under IoT technology. The framework employs IoT sensors to regularly monitor important health indices, compliance with medications, and activities, among other aspects of the environment. The methods employed in development include building a highly accurate and reliable fall detection model, which was validated on the basis of the selected features. Analyzing the obtained results, it is possible to state the positive impact of the discussed AI system in enhancing care efficiency and the elderly people&#x2019;s rights and safety.</p>
<p>This framework has a great application-related potential since it will enable the elderly to live more independently in secure IoT-enabled environments&#x2014;improving their lives. The eventual capability of the system for early warnings and personalized interventions reduces the burden on caregivers, allowing them to be free for higher-level service provision. Such advantages make the proposed system rank in very high positions with respect to finding a solution regarding the growth of elderly caretaking in contemporary society. The care plans that are formulated are personal and, therefore, the older people improve their health, especially in the areas that affect their ability to live independently. In sum, the findings of this study can be beneficial in understanding how smart home technologies can be utilized and have given a framework that proves to be useful in addressing the dynamic needs of the aging population in an increasingly technologically oriented society.</p>
<p>In this respect, reinforcement learning may help in optimizing adaptive response times, for which future research could include the investigation aspect of such dynamic adaptation based on different environmental and user-specific conditions. The hybrid models that will combine real-world feedback of users and caregivers will further refine the prediction and intervention, improving overall system reliability and personalization. The other potential furtherance of the framework could include studying the scalability of the framework in different healthcare environments and investigating further advanced IoT architectures for improved data fidelity. This will continue to reinforce the system&#x2019;s capability for robust and user-oriented elderly care.</p>
<p>In the years to come, new enhancements are possible through wearable IoT devices powered by edge AI, including edge processing for on-the-spur-of-the-moment decisions and without delays inherent in cloud processing. Such a coming together of different technologies also provides on-device, real-time analytics for health-related data and to realize urgent responses for dangerous situations or conditions. There is also vital scalability regarding seamless interoperability between the differing IoT devices with various frames of AI. Standardized protocols and the use of modular architectures could further help in this respect, providing a seamless environment that can support dynamic data exchange and adaptive functionality. These directions open the possibility for robust, scalable solutions for personalized elderly care.</p>
</sec>
</body>
<back>
<sec sec-type="data-availability" id="sec29">
<title>Data availability statement</title>
<p>The raw data supporting the conclusions of this article will be made available by the authors, without undue reservation.</p>
</sec>
<sec sec-type="ethics-statement" id="sec30">
<title>Ethics statement</title>
<p>The studies involving humans were approved by PSAU Ethics Committee, Prince Sattam bin Abdulaziz University. The studies were conducted in accordance with the local legislation and institutional requirements. The participants provided their written informed consent to participate in this study.</p>
</sec>
<sec sec-type="author-contributions" id="sec31">
<title>Author contributions</title>
<p>FN: Conceptualization, Investigation, Methodology, Supervision, Validation, Visualization, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing, Formal analysis. AA: Conceptualization, Investigation, Methodology, Supervision, Validation, Visualization, Writing &#x2013; original draft, Writing &#x2013; review &#x0026; editing, Funding acquisition, Project administration, Resources. MT: Formal analysis, Investigation, Methodology, Visualization, Writing &#x2013; review &#x0026; editing, Software. MK: Formal analysis, Investigation, Methodology, Visualization, Writing &#x2013; review &#x0026; editing, Conceptualization, Project administration, Resources, Supervision, Validation, Writing &#x2013; original draft. NS: Data curation, Formal analysis, Investigation, Software, Writing &#x2013; original draft.</p>
</sec>
<sec sec-type="funding-information" id="sec32">
<title>Funding</title>
<p>The author(s) declare that financial support was received for the research, authorship, and/or publication of this article. This work was supported by the Prince Sattam bin Abdulaziz University through the project number (PSAU/2024/01/99520).</p>
</sec>
<ack>
<p>We extend our sincere gratitude and appreciate the resources and assistance provided by the Ministry of Education and all universities, which were instrumental in the completion of this study.</p>
</ack>
<sec sec-type="COI-statement" id="sec33">
<title>Conflict of interest</title>
<p>The authors declare that the research was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="ai-statement" id="sec34">
<title>Generative AI statement</title>
<p>The authors declare that no Generative AI was used in the creation of this manuscript.</p>
</sec>
<sec sec-type="disclaimer" id="sec35">
<title>Publisher&#x2019;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="ref1"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Addas</surname> <given-names>A.</given-names></name> <name><surname>Khan</surname> <given-names>M. N.</given-names></name> <name><surname>Naseer</surname> <given-names>F.</given-names></name></person-group> (<year>2024</year>). <article-title>Waste management 2.0 leveraging internet of things for an efficient and eco-friendly smart city solution</article-title>. <source>PLoS One</source> <volume>19</volume>:<fpage>e0307608</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0307608</pub-id>, PMID: <pub-id pub-id-type="pmid">39083562</pub-id></citation></ref>
<ref id="ref2"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Aggarwal</surname> <given-names>A.</given-names></name> <name><surname>Mittal</surname> <given-names>M.</given-names></name> <name><surname>Battineni</surname> <given-names>G.</given-names></name></person-group> (<year>2021</year>). <article-title>Generative adversarial network: an overview of theory and applications</article-title>. <source>Int. J. Inf. Manag. Data Insights</source> <volume>1</volume>:<fpage>100004</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.jjimei.2020.100004</pub-id>, PMID: <pub-id pub-id-type="pmid">39843627</pub-id></citation></ref><!-- NOTE(review): PMID 39843627 is also assigned to ref36; the same PMID cannot identify two different articles — verify and correct one of the two records -->
<ref id="ref3"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Ai</surname> <given-names>J.</given-names></name> <name><surname>Feng</surname> <given-names>J.</given-names></name> <name><surname>Yu</surname> <given-names>Y.</given-names></name></person-group> (<year>2022</year>). <article-title>Elderly care provision and the impact on caregiver health in China</article-title>. <source>China World Econ.</source> <volume>30</volume>, <fpage>206</fpage>&#x2013;<lpage>226</lpage>. doi: <pub-id pub-id-type="doi">10.1111/cwe.12443</pub-id></citation></ref>
<ref id="ref4"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Banerjee</surname> <given-names>A.</given-names></name> <name><surname>Chakraborty</surname> <given-names>C.</given-names></name> <name><surname>Kumar</surname> <given-names>A.</given-names></name> <name><surname>Biswas</surname> <given-names>D.</given-names></name></person-group> (<year>2020</year>). &#x201C;<article-title>Emerging trends in IoT and big data analytics for biomedical and health care technologies</article-title>&#x201D; in <source>Handbook of data science approaches for biomedical engineering</source> (<publisher-name>Elsevier</publisher-name>), <fpage>121</fpage>&#x2013;<lpage>152</lpage>. doi: <pub-id pub-id-type="doi">10.1016/B978-0-12-818318-2.00005-2</pub-id></citation></ref>
<ref id="ref5"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Cai</surname> <given-names>Z.</given-names></name> <name><surname>Xiong</surname> <given-names>Z.</given-names></name> <name><surname>Xu</surname> <given-names>H.</given-names></name> <name><surname>Wang</surname> <given-names>P.</given-names></name> <name><surname>Li</surname> <given-names>W.</given-names></name> <name><surname>Pan</surname> <given-names>Y.</given-names></name></person-group> (<year>2021</year>). <article-title>Generative adversarial networks</article-title>. <source>ACM Comput. Surv.</source> <volume>54</volume>, <fpage>1</fpage>&#x2013;<lpage>38</lpage>. doi: <pub-id pub-id-type="doi">10.1145/3459992</pub-id></citation></ref>
<ref id="ref6"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>X.</given-names></name> <name><surname>Fu</surname> <given-names>Z.</given-names></name> <name><surname>Song</surname> <given-names>Z.</given-names></name> <name><surname>Yang</surname> <given-names>L.</given-names></name> <name><surname>Ndifon</surname> <given-names>A. M.</given-names></name> <name><surname>Su</surname> <given-names>Z.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>An IoT and wearables based smart home for ALS patients</article-title>. <source>IEEE Internet Things J.</source> <volume>9</volume>, <fpage>20945</fpage>&#x2013;<lpage>20956</lpage>. doi: <pub-id pub-id-type="doi">10.1109/jiot.2022.3176202</pub-id>, PMID: <pub-id pub-id-type="pmid">39573497</pub-id></citation></ref><!-- NOTE(review): PMID 39573497 is also assigned to ref40; the same PMID cannot identify two different articles — verify and correct one of the two records -->
<ref id="ref7"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Conroy</surname> <given-names>K. M.</given-names></name> <name><surname>Krishnan</surname> <given-names>S.</given-names></name> <name><surname>Mittelstaedt</surname> <given-names>S.</given-names></name> <name><surname>Patel</surname> <given-names>S. S.</given-names></name></person-group> (<year>2020</year>). <article-title>Technological advancements to address elderly loneliness: practical considerations and community resilience implications for COVID-19 pandemic</article-title>. <source>Work. Older People</source> <volume>24</volume>, <fpage>257</fpage>&#x2013;<lpage>264</lpage>. doi: <pub-id pub-id-type="doi">10.1108/wwop-07-2020-0036</pub-id>, PMID: <pub-id pub-id-type="pmid">33679208</pub-id></citation></ref>
<ref id="ref8"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Dakshit</surname> <given-names>S.</given-names></name> <name><surname>Prabhakaran</surname> <given-names>B.</given-names></name></person-group> (<year>2024</year>). <article-title>Generative adversarial networks for biomedical imaging</article-title>. <source>IEEE MultiMed.</source> <volume>31</volume>, <fpage>5</fpage>&#x2013;<lpage>6</lpage>. doi: <pub-id pub-id-type="doi">10.1109/mmul.2024.3448100</pub-id></citation></ref>
<ref id="ref9"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Das</surname> <given-names>D. R.</given-names></name> <name><surname>Satpathy</surname> <given-names>I.</given-names></name> <name><surname>Patnaik</surname> <given-names>B. C. M.</given-names></name></person-group> (<year>2023</year>). &#x201C;<article-title>AI-enabled healthcare service</article-title>&#x201D; in <source>AI and IoT-based technologies for precision medicine</source> (<publisher-name>IGI Global</publisher-name>), <fpage>108</fpage>&#x2013;<lpage>120</lpage>.</citation></ref>
<ref id="ref10"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Ding</surname> <given-names>S.</given-names></name> <name><surname>Wu</surname> <given-names>D.</given-names></name> <name><surname>Zhao</surname> <given-names>L.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name></person-group> (<year>2022</year>). &#x201C;<article-title>Intelligent hospital operation management and risk control</article-title>&#x201D; in <source>Smart healthcare engineering management and risk analytics</source> (<publisher-loc>Singapore</publisher-loc>: <publisher-name>Springer Nature Singapore</publisher-name>), <fpage>189</fpage>&#x2013;<lpage>206</lpage>.</citation></ref>
<ref id="ref11"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Elder</surname> <given-names>C.</given-names></name></person-group> (<year>2017</year>). <article-title>Health care systems support to enhance patient-centered care: lessons from a primary care-based chronic pain management initiative</article-title>. <source>Perm. J.</source> <volume>21</volume>, <fpage>16</fpage>&#x2013;<lpage>101</lpage>. doi: <pub-id pub-id-type="doi">10.7812/tpp/16-101</pub-id>, PMID: <pub-id pub-id-type="pmid">28406791</pub-id></citation></ref>
<ref id="ref12"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Hassan</surname> <given-names>F.</given-names></name> <name><surname>Shaheen</surname> <given-names>M. E.</given-names></name> <name><surname>Sahal</surname> <given-names>R.</given-names></name></person-group> (<year>2020</year>). <article-title>Real-time healthcare monitoring system using online machine learning and spark streaming</article-title>. <source>Int. J. Adv. Comput. Sci. Appl.</source> <volume>11</volume>. doi: <pub-id pub-id-type="doi">10.14569/ijacsa.2020.0110977</pub-id></citation></ref>
<ref id="ref13"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Huynh</surname> <given-names>N.</given-names></name> <name><surname>Yan</surname> <given-names>D.</given-names></name> <name><surname>Ma</surname> <given-names>Y.</given-names></name> <name><surname>Wu</surname> <given-names>S.</given-names></name> <name><surname>Long</surname> <given-names>C.</given-names></name> <name><surname>Sami</surname> <given-names>M. T.</given-names></name> <etal/></person-group>. (<year>2024</year>). <article-title>The use of generative adversarial network and graph convolution network for neuroimaging-based diagnostic classification</article-title>. <source>Brain Sci.</source> <volume>14</volume>:<fpage>456</fpage>. doi: <pub-id pub-id-type="doi">10.3390/brainsci14050456</pub-id>, PMID: <pub-id pub-id-type="pmid">38790434</pub-id></citation></ref>
<ref id="ref14"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Junaid</surname> <given-names>S. B.</given-names></name> <name><surname>Imam</surname> <given-names>A. A.</given-names></name> <name><surname>Balogun</surname> <given-names>A. O.</given-names></name> <name><surname>De Silva</surname> <given-names>L. C.</given-names></name> <name><surname>Surakat</surname> <given-names>Y. A.</given-names></name> <name><surname>Kumar</surname> <given-names>G.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>Recent advancements in emerging technologies for healthcare management systems: a survey</article-title>. <source>Healthcare</source> <volume>10</volume>:<fpage>1940</fpage>. doi: <pub-id pub-id-type="doi">10.3390/healthcare10101940</pub-id>, PMID: <pub-id pub-id-type="pmid">36292387</pub-id></citation></ref>
<ref id="ref15"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Khan</surname> <given-names>M. N.</given-names></name> <name><surname>Altalbe</surname> <given-names>A.</given-names></name> <name><surname>Naseer</surname> <given-names>F.</given-names></name> <name><surname>Awais</surname> <given-names>Q.</given-names></name></person-group> (<year>2024</year>). <article-title>Telehealth-enabled in-home elbow rehabilitation for brachial plexus injuries using deep-reinforcement-learning-assisted telepresence robots</article-title>. <source>Sensors</source> <volume>24</volume>:<fpage>1273</fpage>. doi: <pub-id pub-id-type="doi">10.3390/s24041273</pub-id>, PMID: <pub-id pub-id-type="pmid">38400431</pub-id></citation></ref>
<ref id="ref16"><citation citation-type="confproc"><person-group person-group-type="author"><name><surname>Khan</surname> <given-names>M. N.</given-names></name> <name><surname>Naseer</surname> <given-names>F.</given-names></name></person-group>, (<year>2020</year>). <article-title>IoT based university garbage monitoring system for healthy environment for students</article-title>. In: <conf-name>2020 IEEE 14th International Conference on Semantic Computing (ICSC), 3&#x2013;5 February 2020</conf-name>, <publisher-loc>San Diego, CA, USA</publisher-loc>: <publisher-name>IEEE</publisher-name>.</citation></ref>
<ref id="ref17"><citation citation-type="confproc"><person-group person-group-type="author"><name><surname>Kim</surname> <given-names>S.</given-names></name> <name><surname>Lee</surname> <given-names>C. H.</given-names></name> <name><surname>Yun</surname> <given-names>J. S.</given-names></name> <name><surname>Song</surname> <given-names>S. Y.</given-names></name> <name><surname>Na</surname> <given-names>K. J.</given-names></name></person-group>, (<year>2020</year>). <article-title>Perioperative critical care in old male patient with intrapericardial diaphragmatic hernia and complex diseases</article-title>. In: <conf-name>American Thoracic Society 2020 International Conference, May 15&#x2013;20, 2020</conf-name>. <publisher-loc>Philadelphia, PA</publisher-loc>: <publisher-name>American Thoracic Society</publisher-name>.</citation></ref>
<ref id="ref18"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Kumar</surname> <given-names>A.</given-names></name> <name><surname>Jain</surname> <given-names>R.</given-names></name> <name><surname>Gupta</surname> <given-names>M.</given-names></name> <name><surname>Islam</surname> <given-names>S.</given-names></name> <name><surname>Kumari</surname> <given-names>S.</given-names></name> <name><surname>Sachdeva</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2023</year>). <source>Mobile healthcare applications. 6G-enabled IoT and AI for smart healthcare: challenges, impact, and analysis</source>. <publisher-loc>Boca Raton</publisher-loc>: <publisher-name>CRC Press</publisher-name>, <fpage>69</fpage>&#x2013;<lpage>119</lpage>.</citation></ref>
<ref id="ref19"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mohammadzadeh</surname> <given-names>N.</given-names></name> <name><surname>Gholamzadeh</surname> <given-names>M.</given-names></name> <name><surname>Saeedi</surname> <given-names>S.</given-names></name> <name><surname>Rezayi</surname> <given-names>S.</given-names></name></person-group> (<year>2020</year>). <article-title>The application of wearable smart sensors for monitoring the vital signs of patients in epidemics: a systematic literature review</article-title>. <source>J. Ambient Intell. Humaniz. Comput.</source> <volume>14</volume>, <fpage>6027</fpage>&#x2013;<lpage>6041</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s12652-020-02656-x</pub-id>, PMID: <pub-id pub-id-type="pmid">33224305</pub-id></citation></ref>
<ref id="ref20"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Monlezun</surname> <given-names>D. J.</given-names></name></person-group> (<year>2023</year>). &#x201C;<article-title>AI + healthcare systems</article-title>&#x201D; in <source>The thinking healthcare system</source> (<publisher-name>Elsevier</publisher-name>), <fpage>37</fpage>&#x2013;<lpage>67</lpage>.</citation></ref>
<ref id="ref21"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Mullankandy</surname> <given-names>D. S.</given-names></name> <name><surname>Kazmi</surname> <given-names>I.</given-names></name> <name><surname>Islam</surname> <given-names>T.</given-names></name> <name><surname>Phia</surname> <given-names>W. J.</given-names></name></person-group> (<year>2024</year>). <article-title>Emerging trends in ai-driven health tech: a comprehensive review and future prospects</article-title>. <source>Eur. J. Technol.</source> <volume>8</volume>, <fpage>25</fpage>&#x2013;<lpage>40</lpage>. doi: <pub-id pub-id-type="doi">10.47672/ejt.1888</pub-id></citation></ref>
<ref id="ref22"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Muthu</surname> <given-names>B.</given-names></name> <name><surname>Sivaparthipan</surname> <given-names>C. B.</given-names></name> <name><surname>Manogaran</surname> <given-names>G.</given-names></name> <name><surname>Sundarasekar</surname> <given-names>R.</given-names></name> <name><surname>Kadry</surname> <given-names>S.</given-names></name> <name><surname>Shanthini</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2020</year>). <article-title>IoT based wearable sensor for diseases prediction and symptom analysis in healthcare sector</article-title>. <source>Peer Peer Netw. Appl.</source> <volume>13</volume>, <fpage>2123</fpage>&#x2013;<lpage>2134</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s12083-019-00823-2</pub-id></citation></ref>
<ref id="ref23"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Narejo</surname> <given-names>G. B.</given-names></name></person-group> (<year>2020</year>). &#x201C;<article-title>AI in healthcare</article-title>&#x201D; in <source>Privacy vulnerabilities and data security challenges in the IoT. 1st Edn. Series: Internet of everything (IoE): Security and privacy paradigms</source> (<publisher-loc>Boca Raton, FL</publisher-loc>: <publisher-name>CRC Press</publisher-name>), <fpage>77</fpage>&#x2013;<lpage>92</lpage>.</citation></ref>
<ref id="ref24"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Naseer</surname> <given-names>F.</given-names></name> <name><surname>Khan</surname> <given-names>M. N.</given-names></name> <name><surname>Altalbe</surname> <given-names>A.</given-names></name></person-group> (<year>2023a</year>). <article-title>Intelligent time delay control of telepresence robots using novel deep reinforcement learning algorithm to interact with patients</article-title>. <source>Appl. Sci.</source> <volume>13</volume>:<fpage>2462</fpage>. doi: <pub-id pub-id-type="doi">10.3390/app13042462</pub-id></citation></ref>
<ref id="ref25"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Naseer</surname> <given-names>F.</given-names></name> <name><surname>Khan</surname> <given-names>M. N.</given-names></name> <name><surname>Altalbe</surname> <given-names>A.</given-names></name></person-group> (<year>2023b</year>). <article-title>Telepresence robot with DRL assisted delay compensation in IoT-enabled sustainable healthcare environment</article-title>. <source>Sustainability</source> <volume>15</volume>:<fpage>3585</fpage>. doi: <pub-id pub-id-type="doi">10.3390/su15043585</pub-id></citation></ref>
<ref id="ref26"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Naseer</surname> <given-names>F.</given-names></name> <name><surname>Khan</surname> <given-names>M. N.</given-names></name> <name><surname>Rasool</surname> <given-names>A.</given-names></name> <name><surname>Ayub</surname> <given-names>N.</given-names></name></person-group> (<year>2023c</year>). <article-title>A novel approach to compensate delay in communication by predicting teleoperator behaviour using deep learning and reinforcement learning to control telepresence robot</article-title>. <source>Electron. Lett.</source> <volume>59</volume>:<fpage>e12806</fpage>. doi: <pub-id pub-id-type="doi">10.1049/ell2.12806</pub-id>, PMID: <pub-id pub-id-type="pmid">37957441</pub-id></citation></ref>
<ref id="ref27"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Naseer</surname> <given-names>F.</given-names></name> <name><surname>Nasir Khan</surname> <given-names>M.</given-names></name> <name><surname>Nawaz</surname> <given-names>Z.</given-names></name> <name><surname>Awais</surname> <given-names>Q.</given-names></name></person-group> (<year>2023d</year>). <article-title>Telepresence robots and controlling techniques in healthcare system</article-title>. <source>Comput. Mater. Con.</source> <volume>74</volume>, <fpage>6623</fpage>&#x2013;<lpage>6639</lpage>. doi: <pub-id pub-id-type="doi">10.32604/cmc.2023.035218</pub-id></citation></ref>
<ref id="ref28"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Nasr</surname> <given-names>M.</given-names></name> <name><surname>Islam</surname> <given-names>M. M.</given-names></name> <name><surname>Shehata</surname> <given-names>S.</given-names></name> <name><surname>Karray</surname> <given-names>F.</given-names></name> <name><surname>Quintana</surname> <given-names>Y.</given-names></name></person-group> (<year>2021</year>). <article-title>Smart healthcare in the age of AI: recent advances, challenges, and future prospects</article-title>. <source>IEEE Access</source> <volume>9</volume>, <fpage>145248</fpage>&#x2013;<lpage>145270</lpage>. doi: <pub-id pub-id-type="doi">10.1109/access.2021.3118960</pub-id></citation></ref>
<ref id="ref29"><citation citation-type="book"><person-group person-group-type="editor"><name><surname>Nayak</surname> <given-names>R.</given-names></name> <name><surname>Mittal</surname> <given-names>N.</given-names></name> <name><surname>Kumar</surname> <given-names>M.</given-names></name> <name><surname>Polkowski</surname> <given-names>Z.</given-names></name> <name><surname>Khunteta</surname> <given-names>A.</given-names></name></person-group> (Eds.) (<year>2024</year>). <source>Recent advancements in artificial intelligence</source>. <publisher-loc>Singapore</publisher-loc>: <publisher-name>Springer Nature Singapore</publisher-name>.</citation></ref>
<ref id="ref30"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Patterson</surname> <given-names>A.</given-names></name> <name><surname>Harkey</surname> <given-names>L.</given-names></name> <name><surname>Jung</surname> <given-names>S.</given-names></name> <name><surname>Newton</surname> <given-names>E.</given-names></name></person-group> (<year>2021</year>). <article-title>Patient satisfaction with telehealth in rural settings: a systematic review</article-title>. <source>Am. J. Occup. Ther.</source> <volume>75</volume>:<fpage>7512520383p1</fpage>. doi: <pub-id pub-id-type="doi">10.5014/ajot.2021.75s2-po383</pub-id></citation></ref>
<ref id="ref31"><citation citation-type="confproc"><person-group person-group-type="author"><name><surname>Pinto</surname> <given-names>S.</given-names></name> <name><surname>Cabral</surname> <given-names>J.</given-names></name> <name><surname>Gomes</surname> <given-names>T.</given-names></name></person-group>, (<year>2017</year>). <article-title>We-care: an IoT-based health care system for elderly people</article-title>. In: <conf-name>2017 IEEE International Conference on Industrial Technology (ICIT), 22&#x2013;25 March 2017</conf-name>, <publisher-loc>Toronto, ON</publisher-loc>: <publisher-name>IEEE</publisher-name>.</citation></ref>
<ref id="ref32"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Pool</surname> <given-names>J.</given-names></name> <name><surname>Akhlaghpour</surname> <given-names>S.</given-names></name> <name><surname>Fatehi</surname> <given-names>F.</given-names></name> <name><surname>Gray</surname> <given-names>L. C.</given-names></name></person-group> (<year>2022</year>). <article-title>Data privacy concerns and use of telehealth in the aged care context: an integrative review and research agenda</article-title>. <source>Int. J. Med. Inform.</source> <volume>160</volume>:<fpage>104707</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.ijmedinf.2022.104707</pub-id>, PMID: <pub-id pub-id-type="pmid">35131698</pub-id></citation></ref>
<ref id="ref33"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Qian</surname> <given-names>K.</given-names></name> <name><surname>Zhang</surname> <given-names>Z.</given-names></name> <name><surname>Yamamoto</surname> <given-names>Y.</given-names></name> <name><surname>Schuller</surname> <given-names>B. W.</given-names></name></person-group> (<year>2021</year>). <article-title>Artificial intelligence internet of things for the elderly: from assisted living to health-care monitoring</article-title>. <source>IEEE Signal Process. Mag.</source> <volume>38</volume>, <fpage>78</fpage>&#x2013;<lpage>88</lpage>. doi: <pub-id pub-id-type="doi">10.1109/msp.2021.3057298</pub-id></citation></ref>
<ref id="ref34"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Shaban-Nejad</surname> <given-names>A.</given-names></name> <name><surname>Michalowski</surname> <given-names>M.</given-names></name> <name><surname>Bianco</surname> <given-names>S.</given-names></name></person-group> (<year>2022</year>). &#x201C;<article-title>Multimodal artificial intelligence: next wave of innovation in healthcare and medicine</article-title>&#x201D; in <source>Multimodal AI in healthcare: a paradigm shift in health intelligence</source>, vol. <volume>1060</volume> (<publisher-loc>Cham</publisher-loc>: <publisher-name>Springer International Publishing</publisher-name>), <fpage>1</fpage>&#x2013;<lpage>9</lpage>.</citation></ref>
<ref id="ref35"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Sivabalan</surname> <given-names>S.</given-names></name> <name><surname>Minu</surname> <given-names>R.</given-names></name></person-group> (<year>2022</year>). &#x201C;<article-title>Edge AI for industrial IoT applications</article-title>&#x201D; in <source>Applied edge AI: concepts, platforms, and industry use cases</source> (<publisher-loc>Boca Raton</publisher-loc>: <publisher-name>Auerbach Publications</publisher-name>), <fpage>147</fpage>&#x2013;<lpage>170</lpage>.</citation></ref>
<ref id="ref36"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Sokullu</surname> <given-names>R.</given-names></name> <name><surname>Akka&#x015F;</surname> <given-names>M. A.</given-names></name> <name><surname>Demir</surname> <given-names>E.</given-names></name></person-group> (<year>2020</year>). <article-title>IoT supported smart home for the elderly</article-title>. <source>Internet Things</source> <volume>11</volume>:<fpage>100239</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.iot.2020.100239</pub-id>, PMID: <pub-id pub-id-type="pmid">39843627</pub-id></citation></ref>
<ref id="ref37"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Stavropoulos</surname> <given-names>T. G.</given-names></name> <name><surname>Papastergiou</surname> <given-names>A.</given-names></name> <name><surname>Mpaltadoros</surname> <given-names>L.</given-names></name> <name><surname>Nikolopoulos</surname> <given-names>S.</given-names></name> <name><surname>Kompatsiaris</surname> <given-names>I.</given-names></name></person-group> (<year>2020</year>). <article-title>IoT wearable sensors and devices in elderly care: a literature review</article-title>. <source>Sensors</source> <volume>20</volume>:<fpage>2826</fpage>. doi: <pub-id pub-id-type="doi">10.3390/s20102826</pub-id>, PMID: <pub-id pub-id-type="pmid">32429331</pub-id></citation></ref>
<ref id="ref38"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Subburam</surname> <given-names>R.</given-names></name> <name><surname>Chandralekha</surname> <given-names>E.</given-names></name> <name><surname>Kandasamy</surname> <given-names>V.</given-names></name></person-group> (<year>2024</year>). &#x201C;<article-title>An elderly fall detection system using enhanced random forest in machine learning</article-title>&#x201D; in <source>RAiSE-2023</source> (<publisher-loc>Basel, Switzerland</publisher-loc>: <publisher-name>MDPI</publisher-name>).</citation></ref>
<ref id="ref39"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Tariq</surname> <given-names>M. U.</given-names></name></person-group> (<year>2024</year>). <source>Advanced wearable medical devices and their role in transformative remote health monitoring. In: Advances in healthcare information systems and administration</source>: <publisher-name>IGI Global</publisher-name>, <fpage>308</fpage>&#x2013;<lpage>326</lpage>.</citation></ref>
<ref id="ref40"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>Z.</given-names></name> <name><surname>Liu</surname> <given-names>D.</given-names></name> <name><surname>Sun</surname> <given-names>Y.</given-names></name> <name><surname>Pang</surname> <given-names>X.</given-names></name> <name><surname>Sun</surname> <given-names>P.</given-names></name> <name><surname>Lin</surname> <given-names>F.</given-names></name> <etal/></person-group>. (<year>2022</year>). <article-title>A survey on IoT-enabled home automation systems: attacks and defenses</article-title>. <source>IEEE Commun. Surv. Tutor.</source> <volume>24</volume>, <fpage>2292</fpage>&#x2013;<lpage>2328</lpage>. doi: <pub-id pub-id-type="doi">10.1109/comst.2022.3201557</pub-id>, PMID: <pub-id pub-id-type="pmid">39573497</pub-id></citation></ref>
<ref id="ref41"><citation citation-type="confproc"><person-group person-group-type="author"><name><surname>Wicaksono</surname> <given-names>H.</given-names></name> <name><surname>Santoso</surname> <given-names>P.</given-names></name> <name><surname>Sugiarto</surname> <given-names>I.</given-names></name> <name><surname>Dwipanjung</surname> <given-names>F.</given-names></name></person-group>, (<year>2023</year>). <article-title>Voice-controlled smart home prototype to assist an elder in home care</article-title>. In: <conf-name>International Conference on Industrial, Enterprise, and System Engineering: Collaboration of Science, Technology, and Innovation Toward Sustainable Development</conf-name>, <publisher-loc>Bandung, Indonesia</publisher-loc>: <publisher-name>AIP Publishing</publisher-name>.</citation></ref>
<ref id="ref42"><citation citation-type="book"><person-group person-group-type="author"><name><surname>Wojda</surname> <given-names>T.</given-names></name> <name><surname>Hoffman</surname> <given-names>C.</given-names></name> <name><surname>Jackson</surname> <given-names>J.</given-names></name> <name><surname>Conti</surname> <given-names>T.</given-names></name> <name><surname>Maier</surname> <given-names>J.</given-names></name></person-group> (<year>2023</year>). &#x201C;<article-title>AI in healthcare: implications for family medicine and primary care</article-title>&#x201D; in <source>Artificial intelligence</source> (<publisher-name>IntechOpen</publisher-name>).</citation></ref>
<ref id="ref43"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>S.</given-names></name> <name><surname>Qian</surname> <given-names>Z.</given-names></name> <name><surname>Huang</surname> <given-names>K.</given-names></name> <name><surname>Zhang</surname> <given-names>R.</given-names></name> <name><surname>Xiao</surname> <given-names>J.</given-names></name> <name><surname>He</surname> <given-names>Y.</given-names></name> <etal/></person-group>. (<year>2023</year>). <article-title>Robust generative adversarial network</article-title>. <source>Mach. Learn.</source> <volume>112</volume>, <fpage>5135</fpage>&#x2013;<lpage>5161</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s10994-023-06367-0</pub-id></citation></ref>
<ref id="ref44"><citation citation-type="journal"><person-group person-group-type="author"><name><surname>Zheng</surname> <given-names>K.</given-names></name> <name><surname>Wei</surname> <given-names>M.</given-names></name> <name><surname>Sun</surname> <given-names>G.</given-names></name> <name><surname>Anas</surname> <given-names>B.</given-names></name> <name><surname>Li</surname> <given-names>Y.</given-names></name></person-group> (<year>2019</year>). <article-title>Using vehicle synthesis generative adversarial networks to improve vehicle detection in remote sensing images</article-title>. <source>ISPRS Int. J. Geo-Inf.</source> <volume>8</volume>:<fpage>390</fpage>. doi: <pub-id pub-id-type="doi">10.3390/ijgi8090390</pub-id></citation></ref>
</ref-list>
</back>
</article>