<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "JATS-journalpublishing1-3-mathml3.dtd">
<article xml:lang="EN" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" dtd-version="1.3" article-type="research-article">
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">Front. Comput. Neurosci.</journal-id>
<journal-title-group>
<journal-title>Frontiers in Computational Neuroscience</journal-title>
<abbrev-journal-title abbrev-type="pubmed">Front. Comput. Neurosci.</abbrev-journal-title>
</journal-title-group>
<issn pub-type="epub">1662-5188</issn>
<publisher>
<publisher-name>Frontiers Media S.A.</publisher-name>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.3389/fncom.2025.1731452</article-id>
<article-version article-version-type="Version of Record" vocab="NISO-RP-8-2008"/>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Original Research</subject>
</subj-group>
</article-categories>
<title-group>
<article-title>F<sup>2</sup>-CommNet: Fourier&#x02013;Fractional neural networks with Lyapunov stability guarantees for hallucination-resistant community detection</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes" equal-contrib="yes">
<name><surname>Qu</surname> <given-names>Daozheng</given-names></name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<xref ref-type="author-notes" rid="fn001"><sup>&#x02020;</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x00026; editing</role>
<uri xlink:href="https://loop.frontiersin.org/people/3238426"/>
</contrib>
<contrib contrib-type="author" corresp="yes" equal-contrib="yes">
<name><surname>Ma</surname> <given-names>Yanfei</given-names></name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
<xref ref-type="corresp" rid="c001"><sup>&#x0002A;</sup></xref>
<xref ref-type="author-notes" rid="fn001"><sup>&#x02020;</sup></xref>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Conceptualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/conceptualization/">Conceptualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Data curation" vocab-term-identifier="https://credit.niso.org/contributor-roles/data-curation/">Data curation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Formal analysis" vocab-term-identifier="https://credit.niso.org/contributor-roles/formal-analysis/">Formal analysis</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Funding acquisition" vocab-term-identifier="https://credit.niso.org/contributor-roles/funding-acquisition/">Funding acquisition</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Investigation" vocab-term-identifier="https://credit.niso.org/contributor-roles/investigation/">Investigation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Methodology" vocab-term-identifier="https://credit.niso.org/contributor-roles/methodology/">Methodology</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Project administration" vocab-term-identifier="https://credit.niso.org/contributor-roles/project-administration/">Project administration</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Resources" vocab-term-identifier="https://credit.niso.org/contributor-roles/resources/">Resources</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Software" vocab-term-identifier="https://credit.niso.org/contributor-roles/software/">Software</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Supervision" vocab-term-identifier="https://credit.niso.org/contributor-roles/supervision/">Supervision</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Validation" vocab-term-identifier="https://credit.niso.org/contributor-roles/validation/">Validation</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Visualization" vocab-term-identifier="https://credit.niso.org/contributor-roles/visualization/">Visualization</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; original draft" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-original-draft/">Writing &#x2013; original draft</role>
<role vocab="credit" vocab-identifier="https://credit.niso.org/" vocab-term="Writing &#x2013; review &amp; editing" vocab-term-identifier="https://credit.niso.org/contributor-roles/writing-review-editing/">Writing &#x2013; review &#x00026; editing</role>
<uri xlink:href="https://loop.frontiersin.org/people/3316120"/>
</contrib>
</contrib-group>
<aff id="aff1"><label>1</label><institution>Department of Computer Science, University of Liverpool</institution>, <city>Liverpool</city>, <country country="GB">United Kingdom</country></aff>
<aff id="aff2"><label>2</label><institution>Department of Computer Science, Fairleigh Dickinson University</institution>, <city>Vancouver, BC</city>, <country country="CA">Canada</country></aff>
<author-notes>
<corresp id="c001"><label>&#x0002A;</label>Correspondence: Yanfei Ma, <email xlink:href="mailto:yanfei.ma@ieee.org">yanfei.ma@ieee.org</email>; Daozheng Qu, <email xlink:href="mailto:daozheng.qu@gmail.com">daozheng.qu@gmail.com</email></corresp>
<fn fn-type="equal" id="fn001"><label>&#x02020;</label><p>These authors have contributed equally to this work and share first authorship</p></fn></author-notes>
<pub-date publication-format="electronic" date-type="pub" iso-8601-date="2026-01-21">
<day>21</day>
<month>01</month>
<year>2026</year>
</pub-date>
<pub-date publication-format="electronic" date-type="collection">
<year>2025</year>
</pub-date>
<volume>19</volume>
<elocation-id>1731452</elocation-id>
<history>
<date date-type="received">
<day>24</day>
<month>10</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>14</day>
<month>12</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>25</day>
<month>12</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-statement>Copyright &#x000A9; 2026 Qu and Ma.</copyright-statement>
<copyright-year>2026</copyright-year>
<copyright-holder>Qu and Ma</copyright-holder>
<license>
<ali:license_ref start_date="2026-01-21">https://creativecommons.org/licenses/by/4.0/</ali:license_ref>
<license-p>This is an open-access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution License (CC BY)</ext-link>. The use, distribution or reproduction in other forums is permitted, provided the original author(s) and the copyright owner(s) are credited and that the original publication in this journal is cited, in accordance with accepted academic practice. No use, distribution or reproduction is permitted which does not comply with these terms.</license-p>
</license>
</permissions>
<abstract>
<p>Community detection is a crucial task in network research, applicable to social systems, biology, cybersecurity, and knowledge graphs. Recent advancements in graph neural networks (GNNs) have exhibited significant representational capability; yet, they frequently experience instability and erroneous clustering, often referred to as &#x0201C;hallucinations.&#x0201D; These artifacts stem from sensitivity to high-frequency eigenmodes, over-parameterization, and noise amplification, undermining the robustness of learned communities. To mitigate these constraints, we present F<sup>2</sup>-CommNet, a Fourier&#x02013;Fractional neural framework that incorporates fractional-order dynamics, spectrum filtering, and Lyapunov-based stability analysis. The fractional operator implements long-memory dampening that mitigates oscillations, whereas Fourier spectral projections selectively attenuate eigenmodes susceptible to hallucination. Theoretical analysis delineates certain stability criteria under Lipschitz non-linearities and constrained disturbances, resulting in a demonstrable expansion of the Lyapunov margin. Experimental validation on synthetic and real-world networks indicates that F<sup>2</sup>-CommNet reliably diminishes hallucination indices, enhances stability margins, and produces interpretable communities in comparison to integer-order GNN baselines. This study integrates fractional calculus, spectral graph theory, and neural network dynamics, providing a systematic method for hallucination-resistant community discovery.</p></abstract>
<kwd-group>
<kwd>dynamic community detection</kwd>
<kwd>fractional Fourier transform</kwd>
<kwd>fractional-order control and stability</kwd>
<kwd>fractional-order dynamical systems</kwd>
<kwd>fractional-order optimization</kwd>
<kwd>graph neural networks</kwd>
<kwd>Lyapunov stability</kwd>
<kwd>scalable graph learning</kwd>
</kwd-group>
<funding-group>
 <funding-statement>The author(s) declared that financial support was not received for this work and/or its publication.</funding-statement>
</funding-group>
<counts>
<fig-count count="11"/>
<table-count count="14"/>
<equation-count count="27"/>
<ref-count count="43"/>
<page-count count="21"/>
<word-count count="12063"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>section-at-acceptance</meta-name>
<meta-value></meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec sec-type="intro" id="s1">
<label>1</label>
<title>Introduction</title>
<p>Networks provide a robust abstraction for depicting complicated systems, with nodes representing entities and edges signifying interactions. Identifying communities&#x02014;subsets of nodes characterized by dense internal connections and relatively sparse exterior links&#x02014;is crucial for comprehending structural and functional patterns in social, biological, and technological networks (<xref ref-type="bibr" rid="B3">Cai and Wang, 2023</xref>). Traditional techniques like modularity maximization, Infomap, and label propagation have demonstrated significant efficacy, whereas spectral clustering methods based on graph Laplacian theory offer robust mathematical assurances. Nonetheless, these techniques are frequently fragile, exhibiting sensitivity to noise and disturbances, especially in high-frequency spectrum modes.</p>
<p>The emergence of graph neural networks (GNNs) has revolutionized community discovery through the facilitation of data-driven embeddings of graph structures (<xref ref-type="bibr" rid="B40">Wu et al., 2021</xref>). Variants including Graph Convolutional Networks (GCNs), Graph Attention Networks (GATs), and spectral GNNs (<xref ref-type="bibr" rid="B1">Abbahaddou et al., 2024</xref>) achieve superior accuracy across benchmarks. Nonetheless, several intrinsic limitations persist: over-smoothing in deeper layers (<xref ref-type="bibr" rid="B23">Liu et al., 2024</xref>), diminished expressive capacity (<xref ref-type="bibr" rid="B6">Chen et al., 2025a</xref>), and vulnerability to spurious or unstable partitions&#x02014;phenomena we denote as &#x0201C;hallucinations&#x0201D; (<xref ref-type="bibr" rid="B12">Guo et al., 2025</xref>; <xref ref-type="bibr" rid="B5">Chen et al., 2023</xref>). These hallucinations arise from uncontrolled propagation dynamics, sensitivity to unstable eigenmodes, and an insufficient theoretical foundation.</p>
<p>Fractional-order calculus offers a viable solution to address these challenges. The inherent memory and smoothing properties enable dynamical systems to achieve a balance between responsiveness and stability, effectively mitigating oscillations and noise (<xref ref-type="bibr" rid="B16">Kang et al., 2024</xref>). Fractional-order neural models exhibit benefits in control (<xref ref-type="bibr" rid="B37">Sivalingam and Govindaraj, 2025</xref>), non-linear optimization (<xref ref-type="bibr" rid="B25">Maskey et al., 2023</xref>), and chaotic time-series regulation (<xref ref-type="bibr" rid="B19">Kumar et al., 2024</xref>). Notwithstanding these advancements, their incorporation into graph learning and community detection remains inadequately investigated. Simultaneously, Fourier spectral analysis has demonstrated efficacy in representing graph signals (<xref ref-type="bibr" rid="B28">Panda et al., 2025</xref>) and in the creation of reliable spectral graph filters (<xref ref-type="bibr" rid="B21">Levie et al., 2019</xref>); nevertheless, its integration with fractional dynamics for the suppression of hallucinations has yet to be systematically explored.</p>
<p><bold>Our main contributions are as follows:</bold></p>
<list list-type="bullet">
<list-item><p>This study develops F<sup>2</sup>-CommNet, a fractional-Fourier framework for dynamic community detection with explicit stability guarantees. In contrast to existing GNN-based models that are often heuristic and prone to instability, our approach is grounded in rigorous theory and validated on diverse benchmarks.</p></list-item>
<list-item><p>We establish a fractional Lyapunov framework for dynamic graphs, deriving analytical stability margins (&#x003C1;) and hallucination indices (&#x003B7;<sub>max</sub>) as quantitative criteria. The analysis shows that F<sup>2</sup>-CommNet enlarges the stability margin by more than <bold>3 &#x000D7; </bold> and reduces hallucination indices by up to <bold>35%</bold> compared with existing baselines, providing explicit stability guarantees for community detection.</p></list-item>
<list-item><p>We design F<sup>2</sup>-CommNet, a hybrid architecture that couples fractional-order neural dynamics with adaptive Fourier spectral filtering and stability-aware refinement. This joint design ensures convergence to robust partitions while maintaining near-linear computational complexity [<italic>O</italic>(<italic>nHd</italic>&#x0002B;<italic>nr</italic>log<italic>n</italic>)], enabling scalability to million-node networks in practice.</p></list-item>
<list-item><p>Extensive experiments on seven real-world benchmarks (Cora, Citeseer, PubMed, Reddit, Enron, DBLP, BioGRID) show that F<sup>2</sup>-CommNet improves ARI by up to <bold>25%</bold>, enhances NMI by <bold>15%</bold>, enlarges stability margin &#x003C1; by more than <bold>3 &#x000D7; </bold>, and reduces hallucination indices by up to <bold>35%</bold> compared with static baselines (GCN, GAT) and dynamic baselines (DyGCN, EvolveGCN). F<sup>2</sup>-CommNet achieves the best score on <bold>32 out of 35</bold> metric&#x02013;dataset pairs, demonstrating both robustness and generality across diverse domains.</p></list-item>
</list></sec>
<sec id="s2">
<label>2</label>
<title>Related work</title>
<p>Community detection in complex networks has been widely investigated in the past two decades. Classical approaches include modularity maximization and spectral clustering, which partition networks into cohesive groups of nodes. Fortunato&#x00027;s seminal survey provided a systematic overview of these methods and discussed their limitations in large-scale and dynamic scenarios. More recently, graph neural networks such as GCN and GAT have become standard baselines for learning community structure by integrating node features and network topology. However, these integer-order operators often suffer from instability and sensitivity to noise, especially in temporal settings.</p>
<p>Temporal networks introduce additional challenges. Masuda and Lambiotte laid the theoretical foundations of evolving graphs, while follow-up studies addressed dynamic community detection problems. Extensions such as TGN, DyGCN, and EvolveGCN generalize GNNs to temporal data, but they remain vulnerable to issues such as drifting clusters and hallucinated communities.</p>
<p>To address these challenges, fractional-order dynamics have recently gained attention as a mechanism for modeling long-memory effects. Fractional differential equations are well-established in physics and control, and their integration into neural models has led to promising advances. Recent studies on Neural Fractional-Order Differential Equations (<xref ref-type="bibr" rid="B13">Holme, 2023</xref>), variable-order extensions (<xref ref-type="bibr" rid="B20">Lambiotte and Rosvall, 2022</xref>), and stabilization analysis of fractional-order neural networks (<xref ref-type="bibr" rid="B4">Casteigts et al., 2023</xref>) demonstrate improved robustness and richer temporal dynamics compared to their integer-order counterparts.</p>
<p>Recent studies have also explored centrality-aware and collaborative embedding methods for identifying overlapping or influence-driven community structures. In particular, the centrality-guided network embedding framework proposed in <xref ref-type="bibr" rid="B9">Cheng et al. (2025)</xref> integrates structural importance measures into node representations, providing explicit structural guidance for community-aware learning. Complementary approaches, such as hierarchical structural attention models (<xref ref-type="bibr" rid="B42">Yu et al., 2022</xref>) further emphasize node influence and multi-level structural patterns in static graphs. While proficient at identifying overlapping or hierarchical communities, these models are tailored for <italic>static</italic> networks and depend on centrality-based aims or structural attention mechanisms, failing to mitigate hallucination effects or temporal instability in dynamic graphs. Our F<sup>2</sup>-CommNet method complements previous research by emphasizing stability-aware clustering in dynamic environments via fractional-order refinement and Fourier spectral filtering.</p>
<p>Recent stability-oriented GNNs such as SO-GCN (<xref ref-type="bibr" rid="B8">Chen et al., 2025c</xref>) and LDC-GAT (<xref ref-type="bibr" rid="B7">Chen et al., 2025b</xref>) introduce constraint-based mechanisms to improve feature stability in semi-supervised node classification on static graphs. Although these methods offer significant insights for stabilizing message passing, their label-driven objectives and static configurations contrast with the unsupervised dynamic community detection problem addressed here, where clustering quality must be enhanced across evolving graph snapshots without supervision. Our suggested <italic>F</italic><sup>2</sup>-CommNet enhances this area of research by tackling stability in a temporal and unsupervised context via fractional-order neural dynamics and Fourier spectral filtering.</p>
<p>In addition to message-passing GNNs, contemporary Transformer-based architectures have been introduced for temporal graph modeling, frequently utilizing self-attention to capture long-range temporal relationships. These models have exhibited robust performance in tasks including link prediction and node forecasting. Nevertheless, the majority of Transformer-based graph methodologies are tailored for supervised or semi-supervised predictive tasks and depend on temporal labels or quadratic attention mechanisms, rendering them challenging to implement directly for large-scale unsupervised dynamic community detection. Therefore, although we recognize their significance in the wider context of dynamic graph learning, we do not consider them as directly comparable baselines in our experimental assessment.</p>
<p>Building upon these insights, our article introduces F<sup>2</sup>-CommNet, which integrates fractional-order neural dynamics with Fourier spectral filtering for community detection. Unlike prior dynamic GNNs, F<sup>2</sup>-CommNet provides both empirical robustness against hallucinations and theoretical guarantees on stability margins, bridging the gap between classical spectral methods, GNN-based approaches, and recent advances in fractional-order learning.</p></sec>
<sec id="s3">
<label>3</label>
<title>Methodology</title>
<sec>
<label>3.1</label>
<title>Framework overview</title>
<p>The proposed F<sup>2</sup>-CommNet framework integrates fractional-order neural dynamics, Fourier spectral filtering, and Lyapunov stability control into a unified graph-based learning system for dynamic community detection. The model enhances interpretability and robustness by embedding memory-dependent evolution and spectral regularization into the community learning process. The framework is illustrated in <xref ref-type="fig" rid="F1">Figure 1</xref>.</p>
<fig position="float" id="F1">
<label>Figure 1</label>
<caption><p>The Framework of F<sup>2</sup>-CommNet: Dynamic Community Detection with Stability Guarantees. The model processes graph snapshots through fractional-order neural dynamics for long-memory smoothing, Fourier spectral filtering for noise suppression, and Lyapunov stability refinement guided by a dedicated stability analysis module, to produce hallucination-free community assignments.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0001.tif">
<alt-text content-type="machine-generated">Flowchart illustrating the F&#x000B2;-CommNet framework. It begins with an input graph snapshot processed through a fractional-order neural dynamics layer for smoothing and temporal effects. Next, a Fourier spectral filtering module suppresses noise. The stability-aware embedding refinement step ensures bounded error and convergence. The process ends with output community assignments. A Lyapunov stability analysis supports the framework's stability, quantifying robustness.</alt-text>
</graphic>
</fig>
<p><bold>Step 1: Graph Input</bold>.</p>
<p>Given a sequence of graph snapshots {<italic>G</italic><sub><italic>t</italic></sub> &#x0003D; (<italic>V, E</italic><sub><italic>t</italic></sub>)} with node features <inline-formula><mml:math id="M1"><mml:msub><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>&#x02208;</mml:mo><mml:msup><mml:mrow><mml:mi>&#x0211D;</mml:mi></mml:mrow><mml:mrow><mml:mo>|</mml:mo><mml:mi>V</mml:mi><mml:mo>|</mml:mo><mml:mo>&#x000D7;</mml:mo><mml:mi>d</mml:mi></mml:mrow></mml:msup></mml:math></inline-formula> and Laplacian matrices <italic>L</italic><sub><italic>t</italic></sub> &#x0003D; <italic>D</italic><sub><italic>t</italic></sub>&#x02212;<italic>A</italic><sub><italic>t</italic></sub>, the model initializes node representations for temporal propagation.</p>
<p><bold>Step 2: Fractional Dynamics</bold>.</p>
<p>Node embeddings evolve under Caputo fractional-order differential equations:</p>
<disp-formula id="EQ1"><mml:math id="M2"><mml:mrow><mml:msubsup><mml:mrow><mml:mmultiscripts><mml:mi>D</mml:mi><mml:mprescripts/><mml:none/><mml:mi>C</mml:mi></mml:mmultiscripts></mml:mrow><mml:mi>T</mml:mi><mml:mi>&#x003B1;</mml:mi></mml:msubsup><mml:msub><mml:mi>X</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mo>&#x02212;</mml:mo><mml:mi>C</mml:mi><mml:msub><mml:mi>X</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:mi>W</mml:mi><mml:mi>f</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo stretchy='false'>)</mml:mo><mml:mo>+</mml:mo><mml:msub><mml:mi>U</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>,</mml:mo></mml:mrow></mml:math><label>(1)</label></disp-formula>
<p>where the fractional derivative <inline-formula><mml:math id="M3"><mml:mrow><mml:msubsup><mml:mrow><mml:mmultiscripts><mml:mi>D</mml:mi><mml:mprescripts/><mml:none/><mml:mi>C</mml:mi></mml:mmultiscripts></mml:mrow><mml:mi>T</mml:mi><mml:mi>&#x003B1;</mml:mi></mml:msubsup></mml:mrow></mml:math></inline-formula> introduces long-memory smoothing and non-local temporal effects into neural propagation.</p>
<p><bold>Step 3: Fourier Spectral Filtering</bold>.</p>
<p>Each snapshot is decomposed into Laplacian eigenmodes:</p>
<disp-formula id="EQ2"><mml:math id="M4"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msubsup><mml:mrow><mml:mi>U</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mo>&#x022A4;</mml:mo></mml:mrow></mml:msubsup><mml:msub><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:mtext>&#x02003;</mml:mtext><mml:msub><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mrow><mml:mi>U</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(2)</label></disp-formula>
<p>where unstable high-frequency modes are attenuated by a spectral kernel &#x003D5;(&#x003BB;<sub><italic>k</italic></sub>), thereby reducing noise amplification.</p>
<p><bold>Step 4: Stability Monitoring</bold>.</p>
<p>To establish the Lyapunov-based stability bound, we assume a symmetric positive definite matrix <italic>P</italic>&#x0227B;0 such that the Lyapunov functional <italic>V</italic>(<italic>x</italic>) &#x0003D; <italic>x</italic><sup>&#x022A4;</sup><italic>Px</italic> is well-defined and radially unbounded. This standard assumption in fractional-order stability theory enables the derivation of provable error bounds under the proposed dynamics.</p>
<p>A Lyapunov margin &#x003C1; is estimated as</p>
<disp-formula id="EQ3"><mml:math id="M5"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mi>&#x003C1;</mml:mi><mml:mo>=</mml:mo><mml:msub><mml:mrow><mml:mtext>&#x003BB;</mml:mtext></mml:mrow><mml:mrow><mml:mo class="qopname">min</mml:mo></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>P</mml:mi><mml:mi>C</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>-</mml:mo><mml:mi>F</mml:mi><mml:mo>||</mml:mo><mml:mi>P</mml:mi><mml:mi>W</mml:mi><mml:mo>||</mml:mo><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(3)</label></disp-formula>
<p>while the hallucination index &#x003B7;<sub><italic>k</italic></sub> &#x0003D; &#x003BB;<sub><italic>k</italic></sub><italic>F</italic>&#x02212;<italic>c</italic><sub><italic>k</italic></sub> is monitored for each eigenmode to assess spectral stability.</p>
<p><bold>Step 5: Community Partitioning</bold>.</p>
<p>The stabilized embeddings are clustered into communities {<italic>C</italic><sub><italic>t</italic></sub>} by maximizing the standard Newman&#x02013;Girvan modularity on each snapshot. The term &#x0201C;stability-aware&#x0201D; denotes that modularity optimization is conducted on embeddings that have been previously refined via fractional and Lyapunov-based stabilization modules, rather than on unprocessed features or adjacency matrices.</p></sec>
<sec>
<label>3.2</label>
<title>Fractional-order neural dynamics</title>
<p>We generalize graph neural evolution by introducing a Caputo fractional derivative of order &#x003B1; &#x02208; (0, 1):</p>
<disp-formula id="EQ4"><mml:math id="M6"><mml:mrow><mml:msubsup><mml:mrow><mml:mmultiscripts><mml:mi>D</mml:mi><mml:mprescripts/><mml:none/><mml:mi>C</mml:mi></mml:mmultiscripts></mml:mrow><mml:mi>T</mml:mi><mml:mi>&#x003B1;</mml:mi></mml:msubsup><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo stretchy='false'>(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo>=</mml:mo><mml:mo>&#x02212;</mml:mo><mml:msub><mml:mi>c</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:msub><mml:mi>x</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo stretchy='false'>(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo>+</mml:mo><mml:mstyle displaystyle='true'><mml:munder><mml:mo>&#x02211;</mml:mo><mml:mrow><mml:mi>j</mml:mi><mml:mo>&#x02208;</mml:mo><mml:mi mathvariant='script'>N</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>i</mml:mi><mml:mo stretchy='false'>)</mml:mo></mml:mrow></mml:munder><mml:mrow><mml:msub><mml:mi>w</mml:mi><mml:mrow><mml:mi>i</mml:mi><mml:mi>j</mml:mi></mml:mrow></mml:msub></mml:mrow></mml:mstyle><mml:mi>f</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:msub><mml:mi>x</mml:mi><mml:mi>j</mml:mi></mml:msub><mml:mo stretchy='false'>(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo stretchy='false'>)</mml:mo><mml:mo>+</mml:mo><mml:msub><mml:mi>u</mml:mi><mml:mi>i</mml:mi></mml:msub><mml:mo stretchy='false'>(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo>,</mml:mo></mml:mrow></mml:math><label>(4)</label></disp-formula>
<p>where <italic>x</italic><sub><italic>i</italic></sub>(<italic>t</italic>) denotes the state of node <italic>i</italic>, <italic>c</italic><sub><italic>i</italic></sub>&#x0003E;0 is a leakage coefficient, <italic>w</italic><sub><italic>ij</italic></sub> represents connection weights, <italic>f</italic>(&#x000B7;) is a non-linear activation, and <italic>u</italic><sub><italic>i</italic></sub>(<italic>t</italic>) is an external forcing term.</p>
<p>The Caputo derivative is defined as</p>
<disp-formula id="EQ5"><mml:math id="M7"><mml:mrow><mml:msubsup><mml:mrow><mml:mmultiscripts><mml:mi>D</mml:mi><mml:mprescripts/><mml:none/><mml:mi>C</mml:mi></mml:mmultiscripts></mml:mrow><mml:mi>T</mml:mi><mml:mi>&#x003B1;</mml:mi></mml:msubsup><mml:mi>x</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo>=</mml:mo><mml:mfrac><mml:mn>1</mml:mn><mml:mrow><mml:mi>&#x00393;</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mn>1</mml:mn><mml:mo>&#x02212;</mml:mo><mml:mi>&#x003B1;</mml:mi><mml:mo stretchy='false'>)</mml:mo></mml:mrow></mml:mfrac><mml:mstyle displaystyle='true'><mml:mrow><mml:msubsup><mml:mo>&#x0222B;</mml:mo><mml:mn>0</mml:mn><mml:mi>t</mml:mi></mml:msubsup><mml:mrow><mml:mfrac><mml:mrow><mml:mover accent='true'><mml:mi>x</mml:mi><mml:mo>&#x002D9;</mml:mo></mml:mover><mml:mo stretchy='false'>(</mml:mo><mml:mi>&#x003C4;</mml:mi><mml:mo stretchy='false'>)</mml:mo></mml:mrow><mml:mrow><mml:msup><mml:mrow><mml:mo stretchy='false'>(</mml:mo><mml:mi>t</mml:mi><mml:mo>&#x02212;</mml:mo><mml:mi>&#x003C4;</mml:mi><mml:mo stretchy='false'>)</mml:mo></mml:mrow><mml:mi>&#x003B1;</mml:mi></mml:msup></mml:mrow></mml:mfrac></mml:mrow></mml:mrow></mml:mstyle><mml:mi>d</mml:mi><mml:mi>&#x003C4;</mml:mi><mml:mo>,</mml:mo></mml:mrow></mml:math><label>(5)</label></disp-formula>
<p>where &#x00393;(&#x000B7;) denotes the Gamma function. This expression reveals that the derivative depends on the entire historical trajectory <italic>x</italic>(&#x003C4;) for &#x003C4; &#x02264; <italic>t</italic>, embedding long-memory effects within the dynamics. Compared with the integer-order case (&#x003B1; &#x0003D; 1), fractional dynamics dampen oscillations and enlarge the convergence basin, improving robustness to perturbations.</p>
<p>To practically compute the fractional derivative <inline-formula><mml:math id="M8"><mml:mrow><mml:msubsup><mml:mrow><mml:mmultiscripts><mml:mi>D</mml:mi><mml:mprescripts/><mml:none/><mml:mi>C</mml:mi></mml:mmultiscripts></mml:mrow><mml:mi>T</mml:mi><mml:mi>&#x003B1;</mml:mi></mml:msubsup></mml:mrow></mml:math></inline-formula>, we adopt the truncated Gr&#x000FC;nwald&#x02013;Letnikov (GL) approximation:</p>
<disp-formula id="EQ6"><mml:math id="M9"><mml:mrow><mml:msubsup><mml:mrow><mml:mmultiscripts><mml:mi>D</mml:mi><mml:mprescripts/><mml:none/><mml:mi>C</mml:mi></mml:mmultiscripts></mml:mrow><mml:mi>T</mml:mi><mml:mi>&#x003B1;</mml:mi></mml:msubsup><mml:msub><mml:mi>X</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>&#x02248;</mml:mo><mml:mstyle displaystyle='true'><mml:munderover><mml:mo>&#x02211;</mml:mo><mml:mrow><mml:mi>j</mml:mi><mml:mo>=</mml:mo><mml:mn>0</mml:mn></mml:mrow><mml:mi>H</mml:mi></mml:munderover><mml:mrow><mml:msub><mml:mi>w</mml:mi><mml:mi>j</mml:mi></mml:msub></mml:mrow></mml:mstyle><mml:msub><mml:mi>X</mml:mi><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x02212;</mml:mo><mml:mi>j</mml:mi></mml:mrow></mml:msub><mml:mo>,</mml:mo><mml:mtext>&#x02003;&#x02003;</mml:mtext><mml:msub><mml:mi>w</mml:mi><mml:mi>j</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:msup><mml:mrow><mml:mo stretchy='false'>(</mml:mo><mml:mo>&#x02212;</mml:mo><mml:mn>1</mml:mn><mml:mo stretchy='false'>)</mml:mo></mml:mrow><mml:mi>j</mml:mi></mml:msup><mml:mrow><mml:mo>(</mml:mo><mml:mtable><mml:mtr><mml:mtd><mml:mi>&#x003B1;</mml:mi></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mi>j</mml:mi></mml:mtd></mml:mtr></mml:mtable><mml:mo>)</mml:mo></mml:mrow><mml:mo>.</mml:mo></mml:mrow></mml:math><label>(6)</label></disp-formula>
<p>Here, <italic>H</italic> denotes the memory horizon that limits the fractional historical dependence. This discretization yields an efficient and numerically stable implementation suitable for large-scale dynamic graphs.</p>
<p>From a modeling perspective, the window <italic>H</italic> defines the effective memory horizon of the Caputo operator: larger <italic>H</italic> retains longer-range temporal dependence, but also increases computational cost. In practice, we require <italic>H</italic>&#x0226A;<italic>T</italic>, where <italic>T</italic> is the sequence length, and we find that choosing <italic>H</italic>/<italic>T</italic> in a small range (around 5&#x02013;15% of <italic>T</italic>) preserves the desired long-memory behavior while keeping the overall complexity near-linear. The dataset-specific choices of <italic>H</italic> are summarized in Section 4.3.</p>
<p>For the collective evolution of all node features, we express the fractional-order neural dynamics in matrix form:</p>
<disp-formula id="EQ7"><mml:math id="M10"><mml:mrow><mml:msubsup><mml:mrow><mml:mmultiscripts><mml:mi>D</mml:mi><mml:mprescripts/><mml:none/><mml:mi>C</mml:mi></mml:mmultiscripts></mml:mrow><mml:mi>T</mml:mi><mml:mi>&#x003B1;</mml:mi></mml:msubsup><mml:msub><mml:mi>X</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>=</mml:mo><mml:mo>&#x02212;</mml:mo><mml:mi>C</mml:mi><mml:msub><mml:mi>X</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>+</mml:mo><mml:mi>W</mml:mi><mml:mi>f</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:msub><mml:mi>X</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo stretchy='false'>)</mml:mo><mml:mo>+</mml:mo><mml:msub><mml:mi>U</mml:mi><mml:mi>t</mml:mi></mml:msub><mml:mo>,</mml:mo></mml:mrow></mml:math><label>(7)</label></disp-formula>
<p>where <italic>X</italic><sub><italic>t</italic></sub> is the node feature matrix, <italic>C</italic> is a leakage coefficient matrix, <italic>W</italic> is a weight matrix for inter-node connections, and <italic>U</italic><sub><italic>t</italic></sub> represents external forcing. This matrix form guides the temporal propagation of node representations within our F<sup>2</sup>-CommNet model.</p></sec>
<sec>
<label>3.3</label>
<title>Fourier spectral filtering</title>
<p>Let <italic>L</italic> &#x0003D; <italic>U&#x0039B;U</italic><sup>&#x022A4;</sup> denote the Laplacian decomposition, where &#x0039B; &#x0003D; diag(&#x003BB;<sub>1</sub>, &#x02026;, &#x003BB;<sub><italic>n</italic></sub>) contains eigenvalues and <italic>U</italic> &#x0003D; [<italic>u</italic><sub>1</sub>, &#x02026;, <italic>u</italic><sub><italic>n</italic></sub>] the corresponding eigenvectors. Spectral projection and reconstruction are expressed as</p>
<disp-formula id="EQ8"><mml:math id="M11"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mover accent="true"><mml:mrow><mml:mi>x</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:msup><mml:mrow><mml:mi>U</mml:mi></mml:mrow><mml:mrow><mml:mo>&#x022A4;</mml:mo></mml:mrow></mml:msup><mml:mi>x</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mtext>&#x02003;&#x000A0;</mml:mtext><mml:mi>x</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>U</mml:mi><mml:mover accent="true"><mml:mrow><mml:mi>x</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>.</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(8)</label></disp-formula>
<p>For each eigenmode <italic>u</italic><sub><italic>k</italic></sub>, the hallucination index is defined by</p>
<disp-formula id="EQ9"><mml:math id="M12"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mi>&#x003B7;</mml:mi></mml:mrow><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:msub><mml:mrow><mml:mtext>&#x003BB;</mml:mtext></mml:mrow><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msub><mml:mi>F</mml:mi><mml:mo>-</mml:mo><mml:msub><mml:mrow><mml:mi>c</mml:mi></mml:mrow><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msub><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(9)</label></disp-formula>
<p>where <italic>F</italic> is the forcing gain and <italic>c</italic><sub><italic>k</italic></sub> is the leakage term. Modes with &#x003B7;<sub><italic>k</italic></sub>&#x0003E;0 are deemed unstable, while &#x003B7;<sub><italic>k</italic></sub> &#x0003C; 0 indicates spectral stability. Because high-frequency eigenmodes (large &#x003BB;<sub><italic>k</italic></sub>) amplify noise, F<sup>2</sup>-CommNet employs adaptive Fourier filtering:</p>
<disp-formula id="EQ10"><mml:math id="M13"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>x</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>&#x021A6;</mml:mo><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>x</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mi>&#x003D5;</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msub><mml:mrow><mml:mtext>&#x003BB;</mml:mtext></mml:mrow><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(10)</label></disp-formula>
<p>where &#x003D5;(&#x003BB;<sub><italic>k</italic></sub>) is a decay kernel that suppresses unstable modes and preserves low-frequency structure.</p>
<p>Integrating the spectral projection, adaptive filtering (where &#x003D5;(&#x003BB;<sub><italic>k</italic></sub>) is often parameterized as <italic>g</italic><sub>&#x003B8;</sub>(&#x0039B;)), and reconstruction, the comprehensive Fourier spectral filtering operation for the feature matrix <inline-formula><mml:math id="M14"><mml:msubsup><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>&#x003B1;</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow></mml:msubsup></mml:math></inline-formula> is expressed as:</p>
<disp-formula id="EQ11"><mml:math id="M15"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mi>U</mml:mi><mml:msub><mml:mrow><mml:mi>g</mml:mi></mml:mrow><mml:mrow><mml:mi>&#x003B8;</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mtext>&#x0039B;</mml:mtext></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:msup><mml:mrow><mml:mi>U</mml:mi></mml:mrow><mml:mrow><mml:mo>&#x022A4;</mml:mo></mml:mrow></mml:msup><mml:msubsup><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>&#x003B1;</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow></mml:msubsup><mml:mo>.</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(11)</label></disp-formula>
<p>This operation effectively purifies the node embeddings by removing noise-amplifying high-frequency components.</p>
<p>Intuitively, the decay kernel &#x003D5;(&#x003BB;<sub><italic>k</italic></sub>) controls the degree of suppression applied to each spectral mode. High-frequency components associated with large eigenvalues &#x003BB;<sub><italic>k</italic></sub> tend to exhibit oscillatory or unstable behavior in dynamic graphs. A stronger decay factor, therefore, effectively damps these fluctuations, reducing the likelihood of hallucinated communities while preserving low-frequency structural information.</p></sec>
<sec>
<label>3.4</label>
<title>Stability guarantees</title>
<p>We summarize the key ideas behind the stability analysis and present the main results in a concise form for improved readability.</p>
<p><bold>Error dynamics and hallucinations</bold>. Let <italic>x</italic><sup>&#x0002A;</sup>(<italic>t</italic>) denote an ideal (hallucination-free) community trajectory and define the deviation</p>
<disp-formula id="EQ12"><mml:math id="M16"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mi>e</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>x</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>-</mml:mo><mml:msup><mml:mrow><mml:mi>x</mml:mi></mml:mrow><mml:mrow><mml:mo>*</mml:mo></mml:mrow></mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>.</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(12)</label></disp-formula>
<p>The fractional-order error dynamics can be written as</p>
<disp-formula id="EQ13"><mml:math id="M17"><mml:mrow><mml:msubsup><mml:mrow><mml:mmultiscripts><mml:mi>D</mml:mi><mml:mprescripts/><mml:none/><mml:mi>C</mml:mi></mml:mmultiscripts></mml:mrow><mml:mi>T</mml:mi><mml:mi>&#x003B1;</mml:mi></mml:msubsup><mml:mi>e</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo>=</mml:mo><mml:mo>&#x02212;</mml:mo><mml:mo stretchy='false'>(</mml:mo><mml:mi>C</mml:mi><mml:mo>&#x02212;</mml:mo><mml:mi>F</mml:mi><mml:mi>W</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mi>e</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo>+</mml:mo><mml:mi>&#x00394;</mml:mi><mml:mi>u</mml:mi><mml:mo stretchy='false'>(</mml:mo><mml:mi>t</mml:mi><mml:mo stretchy='false'>)</mml:mo><mml:mo>,</mml:mo></mml:mrow></mml:math><label>(13)</label></disp-formula>
<p>where &#x00394;<italic>u</italic>(<italic>t</italic>) models perturbations such as noise or modeling mismatch. Intuitively, a persistent non-zero <italic>e</italic>(<italic>t</italic>) corresponds to <italic>hallucinated</italic> community states.</p>
<p><bold>Lyapunov function and Mittag&#x02013;Leffler bound</bold>. We consider the quadratic Lyapunov function</p>
<disp-formula id="EQ14"><mml:math id="M18"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mi>V</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>=</mml:mo><mml:mi>e</mml:mi><mml:msup><mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mo>&#x022A4;</mml:mo></mml:mrow></mml:msup><mml:mi>P</mml:mi><mml:mi>e</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo><mml:mtext>&#x02003;</mml:mtext><mml:mi>P</mml:mi><mml:mo>&#x0227B;</mml:mo><mml:mn>0</mml:mn><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(14)</label></disp-formula>
<p>with <italic>P</italic> symmetric positive definite. If there exists <italic>P</italic>&#x0227B;0 and a margin &#x003C1;&#x0003E;0 such that</p>
<disp-formula id="EQ15"><mml:math id="M19"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mi>P</mml:mi><mml:mi>C</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:msup><mml:mrow><mml:mi>C</mml:mi></mml:mrow><mml:mrow><mml:mo>&#x022A4;</mml:mo></mml:mrow></mml:msup><mml:mi>P</mml:mi><mml:mo>-</mml:mo><mml:mn>2</mml:mn><mml:mi>F</mml:mi><mml:mi>P</mml:mi><mml:mi>W</mml:mi><mml:mo>&#x0227A;</mml:mo><mml:mo>-</mml:mo><mml:mi>&#x003C1;</mml:mi><mml:mi>I</mml:mi><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(15)</label></disp-formula>
<p>then the error norm admits the fractional-order bound</p>
<disp-formula id="EQ16"><mml:math id="M20"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mo>||</mml:mo><mml:mi>e</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>||</mml:mo><mml:mo>&#x02264;</mml:mo><mml:mo>||</mml:mo><mml:mi>e</mml:mi><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>0</mml:mn></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>||</mml:mo><mml:msub><mml:mrow><mml:mi>E</mml:mi></mml:mrow><mml:mrow><mml:mi>&#x003B1;</mml:mi></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mo>-</mml:mo><mml:mi>&#x003C1;</mml:mi><mml:msup><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mi>&#x003B1;</mml:mi></mml:mrow></mml:msup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>&#x0002B;</mml:mo><mml:mfrac><mml:mrow><mml:msub><mml:mrow><mml:mtext>&#x003BB;</mml:mtext></mml:mrow><mml:mrow><mml:mo class="qopname">max</mml:mo></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>P</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mrow><mml:mrow><mml:mi>&#x003C1;</mml:mi></mml:mrow></mml:mfrac><mml:mi>&#x0016B;</mml:mi><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(16)</label></disp-formula>
<p>where <italic>E</italic><sub>&#x003B1;</sub>(&#x000B7;) is the Mittag&#x02013;Leffler function and ||&#x00394;<italic>u</italic>(<italic>t</italic>)|| &#x02264; &#x0016B;. The Mittag&#x02013;Leffler term generalizes the exponential decay in integer-order systems, capturing the memory-dependent, non-local convergence behavior of fractional dynamics.</p>
<p><xref ref-type="disp-formula" rid="EQ16">Equation 16</xref> implies that the error is ultimately bounded and cannot diverge if &#x003C1;&#x0003E;0 and &#x0016B; is finite, thereby preventing long-term hallucinations.</p>
<p><bold>Spectral stability margin and fractional effect</bold>. For interpretability, we summarize the stability condition using a spectral margin</p>
<disp-formula id="EQ17"><mml:math id="M21"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mi>&#x003C1;</mml:mi><mml:mo>=</mml:mo><mml:msub><mml:mrow><mml:mtext>&#x003BB;</mml:mtext></mml:mrow><mml:mrow><mml:mo class="qopname">min</mml:mo></mml:mrow></mml:msub><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>P</mml:mi><mml:mi>C</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>-</mml:mo><mml:mi>F</mml:mi><mml:mo>||</mml:mo><mml:mi>P</mml:mi><mml:mi>W</mml:mi><mml:mo>||</mml:mo><mml:mo>.</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(17)</label></disp-formula>
<p>A larger &#x003C1; indicates a larger region of attraction and stronger robustness to perturbations. In fractional-order dynamics (&#x003B1; &#x0003C; 1), the effective forcing term is attenuated by a factor depending on &#x00393;(1&#x02212;&#x003B1;), which increases the margin &#x003C1; compared to the integer-order case. As a result, fewer Laplacian eigenmodes violate the stability condition, and the hallucination indices &#x003B7;<sub><italic>k</italic></sub> tend to become negative, which matches the empirical reduction in unstable high-frequency modes.</p>
<p><bold>Summary</bold>. Rather than providing full proofs, we emphasize the practical implications: (i) the Lyapunov condition (<xref ref-type="disp-formula" rid="EQ15">Equation 15</xref>) and margin (<xref ref-type="disp-formula" rid="EQ17">Equation 17</xref>) quantify robustness against hallucinations; (ii) fractional dynamics enlarge this stability region; and (iii) Fourier spectral filtering further pushes &#x003B7;<sub><italic>k</italic></sub> to the stable regime. These theoretical insights are empirically validated by the stability and hallucination metrics reported in Section 4.</p>
<p>The stability analysis adheres to conventional fractional-order Lyapunov theory and necessitates two technical prerequisites: (i) the non-linear activation <italic>f</italic>(&#x000B7;) exhibits Lipschitz continuity, and (ii) the external disturbances are bounded. In practical GNN training, these assumptions are approximately valid. First, although ReLU is not differentiable at the origin, it is piecewise linear and globally 1-Lipschitz, and contemporary optimizers (such as Adam with small step sizes) maintain the iterates inside compact domains where the local Lipschitz constant remains finite. This relaxation is widely adopted in stability analyses of neural and graph-based models (<xref ref-type="bibr" rid="B30">Pascanu et al., 2013</xref>; <xref ref-type="bibr" rid="B35">Scaman and Virmaux, 2018</xref>). Second, the disturbances induced by stochastic training noise and spectral approximation errors remain bounded due to gradient clipping, finite-step discretization, and the bounded magnitude of node embeddings. Finally, the Lyapunov matrix <italic>P</italic> is computed numerically using the classical Bartels&#x02013;Stewart solver (<xref ref-type="bibr" rid="B11">Golub and Van Loan, 2013</xref>), which ensures <italic>P</italic>&#x0227B;0 in all experiments. Together, these considerations justify the applicability of the theoretical assumptions in real training scenarios while preserving the rigor of the stability guarantees.</p></sec>
<sec>
<label>3.5</label>
<title>Stability-aware embedding refinement</title>
<p>To practically enforce the Lyapunov stability conditions and ensure bounded error dynamics, our framework actively refines the node embeddings. The next-step embeddings <italic>X</italic><sub><italic>t</italic>&#x0002B;1</sub> are determined by solving an optimization problem that balances fidelity to the spectrally filtered embeddings <inline-formula><mml:math id="M22"><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mo>^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub></mml:math></inline-formula> with a regularization term directly tied to the system&#x00027;s stability:</p>
<disp-formula id="EQ18"><mml:math id="M23"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mrow><mml:mi>t</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mo class="qopname">arg</mml:mo><mml:mstyle displaystyle="true"><mml:munder class="msub"><mml:mrow><mml:mo class="qopname">min</mml:mo></mml:mrow><mml:mrow><mml:mi>Z</mml:mi></mml:mrow></mml:munder></mml:mstyle><mml:mo>||</mml:mo><mml:mi>Z</mml:mi><mml:mo>-</mml:mo><mml:msub><mml:mrow><mml:mover accent="true"><mml:mrow><mml:mi>X</mml:mi></mml:mrow><mml:mo class="qopname">^</mml:mo></mml:mover></mml:mrow><mml:mrow><mml:mi>t</mml:mi></mml:mrow></mml:msub><mml:msup><mml:mrow><mml:mo>||</mml:mo></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup><mml:mo>&#x0002B;</mml:mo><mml:mtext>&#x003BB;</mml:mtext><mml:msup><mml:mrow><mml:mi>&#x003C1;</mml:mi></mml:mrow><mml:mrow><mml:mo>-</mml:mo><mml:mn>1</mml:mn></mml:mrow></mml:msup><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>Z</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(18)</label></disp-formula>
<p>where &#x003BB;&#x0003E;0 is a hyperparameter balancing the two terms. Here, &#x003C1;(<italic>Z</italic>) represents the spectral stability margin associated with the candidate embedding <italic>Z</italic>. By minimizing &#x003C1;<sup>&#x02212;1</sup>(<italic>Z</italic>), the optimization actively guides the model toward configurations that maximize &#x003C1;, thereby enhancing system stability and mitigating the formation of hallucination-prone structures. This term ensures that the information energy in the embeddings remains bounded, preventing persistent instability or convergence to spurious equilibria.</p>
<p>The stability-aware refinement and clustering stage is closely interconnected: modularity is optimized on embeddings <italic>X</italic><sub><italic>t</italic>&#x0002B;1</sub> that have been specifically regularized by the Lyapunov margin &#x003C1;(<italic>Z</italic>). The hallucination indices {&#x003B7;<sub><italic>k</italic></sub>} serve as diagnostic metrics to assess if the resultant communities engage unstable spectral modes. In this context, stability considerations indirectly influence the final partition via embedding refinement, but the clustering target continues to adhere to the conventional modularity metric.</p></sec>
<sec>
<label>3.6</label>
<title>Algorithm</title>
<p>We summarize the complete training and inference workflow of F<sup>2</sup>-CommNet in <xref ref-type="statement" rid="algo1">Algorithm 1</xref> and <xref ref-type="fig" rid="F1">Figure 1</xref>, which integrates fractional dynamics, Fourier spectral filtering, Lyapunov stability monitoring, and stability-aware modularity optimization.</p>
<statement content-type="algorithm" id="algo1">
<label>Algorithm 1</label>
<title>F<sup>2</sup>-CommNet Update Rule.</title>
<p>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-i0001.tif"/>
</p>
</statement>
</sec>
<sec>
<label>3.7</label>
<title>Complexity analysis</title>
<p>We analyze the computational complexity of F<sup>2</sup>-CommNet in both training and inference phases by decomposing its workflow into the major steps of <xref ref-type="statement" rid="algo1">Algorithm 1</xref>. Let <italic>n</italic> &#x0003D; |<italic>V</italic>| be the number of nodes, <italic>d</italic> the embedding dimension, <italic>H</italic> the effective memory horizon for fractional dynamics, and <italic>r</italic>&#x0226A;<italic>n</italic> the number of leading eigenpairs retained for spectral decomposition.</p>
<sec>
<label>3.7.1</label>
<title>Training phase</title>
<p>For each snapshot, three main costs dominate:</p>
<list list-type="bullet">
<list-item><p><bold>Fractional Dynamics</bold>. Updating embeddings under Caputo fractional dynamics requires convolution with <italic>H</italic> past states, leading to <inline-formula><mml:math id="M28"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>H</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>.</p></list-item>
<list-item><p><bold>Spectral Decomposition</bold>. A full Laplacian eigen-decomposition costs <inline-formula><mml:math id="M29"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mi>n</mml:mi></mml:mrow><mml:mrow><mml:mn>3</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>, but in practice only the top <italic>r</italic> modes are approximated using Lanczos or randomized SVD, giving <inline-formula><mml:math id="M30"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>r</mml:mi><mml:mo class="qopname">log</mml:mo><mml:mi>n</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>.</p></list-item>
<list-item><p><bold>Spectral Filtering</bold>. Multiplying embeddings by the spectral kernel &#x003D5;(&#x0039B;<sub><italic>t</italic></sub>) requires <inline-formula><mml:math id="M31"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>.</p></list-item>
<list-item><p><bold>Stability Monitoring</bold>. Computing hallucination indices &#x003B7;<sub><italic>k</italic></sub> for <italic>r</italic> modes and the Lyapunov margin &#x003C1; costs <inline-formula><mml:math id="M32"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>r</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:msup><mml:mrow><mml:mi>d</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>, negligible compared to spectral steps.</p></list-item>
<list-item><p><bold>Community Partitioning</bold>. Modularity-based clustering of <italic>n</italic> nodes requires <inline-formula><mml:math id="M33"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>.</p></list-item>
</list>
<p>Thus, the per-snapshot training complexity is approximately</p>
<disp-formula id="EQ19"><mml:math id="M34"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>H</mml:mi><mml:mi>d</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mi>n</mml:mi><mml:mi>r</mml:mi><mml:mo class="qopname">log</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>.</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(19)</label></disp-formula></sec>
<sec>
<label>3.7.2</label>
<title>Inference phase</title>
<p>During inference, no parameter updates are performed. Each new snapshot requires:</p>
<list list-type="bullet">
<list-item><p>Fractional propagation <inline-formula><mml:math id="M35"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>H</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula> with truncated history.</p></list-item>
<list-item><p>Approximate eigen-decomposition <inline-formula><mml:math id="M36"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>r</mml:mi><mml:mo class="qopname">log</mml:mo><mml:mi>n</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>.</p></list-item>
<list-item><p>Spectral filtering and stability evaluation <inline-formula><mml:math id="M37"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>.</p></list-item>
<list-item><p>Community assignment <inline-formula><mml:math id="M38"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>.</p></list-item>
</list>
<p>Hence, the per-snapshot inference complexity is</p>
<disp-formula id="EQ20"><mml:math id="M39"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>H</mml:mi><mml:mi>d</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mi>n</mml:mi><mml:mi>r</mml:mi><mml:mo class="qopname">log</mml:mo><mml:mi>n</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>.</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(20)</label></disp-formula></sec>
<sec>
<label>3.7.3</label>
<title>Comparison</title>
<p>Both training and inference scale nearly linearly with <italic>n</italic> when <italic>H</italic> and <italic>r</italic> are moderate, making F<sup>2</sup>-CommNet applicable to large-scale graphs. In practice, we use a truncated memory horizon <italic>H</italic> whose value is selected via a small validation sweep for each dataset (see Section 4.3). This keeps the fractional update efficient while preserving the long-memory behavior required by fractional dynamics. We also retain only <italic>r</italic>&#x0226A;<italic>n</italic> leading spectral modes, which together ensure that F<sup>2</sup>-CommNet remains computationally tractable even for large dynamic graphs.</p>
<p><xref ref-type="table" rid="T1">Table 1</xref> presents the complexity analysis of each major component in F<sup>2</sup>-CommNet. The <italic>Fractional Dynamics</italic> step incurs a cost of <inline-formula><mml:math id="M40"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>H</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>, linear in the number of nodes and embedding dimension over the memory horizon. The <italic>Spectral Decomposition</italic> requires an approximate eigen-decomposition of the Laplacian, with complexity <inline-formula><mml:math id="M41"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>r</mml:mi><mml:mo class="qopname">log</mml:mo><mml:mi>n</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula> depending on the retained eigenmodes <italic>r</italic>. <italic>Spectral Filtering</italic> and <italic>Community Partitioning</italic> both scale linearly with <inline-formula><mml:math id="M42"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>, while <italic>Stability Monitoring</italic> adds a smaller overhead of <inline-formula><mml:math id="M43"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>r</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:msup><mml:mrow><mml:mi>d</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>.</p>
<table-wrap position="float" id="T1">
<label>Table 1</label>
<caption><p>Complexity analysis of F<sup>2</sup>-CommNet components.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Component</bold></th>
<th valign="top" align="left"><bold>Complexity</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Fractional dynamics</td>
<td valign="top" align="left"><inline-formula><mml:math id="M46a"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>H</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
</tr>
<tr>
<td valign="top" align="left">Spectral decomposition</td>
<td valign="top" align="left"><inline-formula><mml:math id="M46"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>r</mml:mi><mml:mo class="qopname">log</mml:mo><mml:mi>n</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula> (approximate)</td>
</tr>
<tr>
<td valign="top" align="left">Spectral filtering</td>
<td valign="top" align="left"><inline-formula><mml:math id="M47"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
</tr>
<tr>
<td valign="top" align="left">Stability monitoring</td>
<td valign="top" align="left"><inline-formula><mml:math id="M48"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>r</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:msup><mml:mrow><mml:mi>d</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
</tr>
<tr>
<td valign="top" align="left">Community partitioning</td>
<td valign="top" align="left"><inline-formula><mml:math id="M49"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
</tr>
<tr>
<td valign="top" align="left">Training (per snapshot)</td>
<td valign="top" align="left"><inline-formula><mml:math id="M50"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>H</mml:mi><mml:mi>d</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mi>n</mml:mi><mml:mi>r</mml:mi><mml:mo class="qopname">log</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
</tr>
<tr>
<td valign="top" align="left">Inference (per snapshot)</td>
<td valign="top" align="left"><inline-formula><mml:math id="M51"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>H</mml:mi><mml:mi>d</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mi>n</mml:mi><mml:mi>r</mml:mi><mml:mo class="qopname">log</mml:mo><mml:mi>n</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula></td>
</tr></tbody>
</table>
<table-wrap-foot>
<p><italic>n</italic>, number of nodes; <italic>d</italic>, embedding dimension; <italic>H</italic>, memory horizon; <italic>r</italic>, retained eigenmodes.</p>
</table-wrap-foot>
</table-wrap>
<p>Aggregating these terms, the overall <italic>training</italic> complexity per snapshot is <inline-formula><mml:math id="M44"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>H</mml:mi><mml:mi>d</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mi>n</mml:mi><mml:mi>r</mml:mi><mml:mo class="qopname">log</mml:mo><mml:mi>n</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mi>n</mml:mi><mml:mi>d</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula>, while the <italic>inference</italic> complexity per snapshot reduces to <inline-formula><mml:math id="M45"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>n</mml:mi><mml:mi>H</mml:mi><mml:mi>d</mml:mi><mml:mo>&#x0002B;</mml:mo><mml:mi>n</mml:mi><mml:mi>r</mml:mi><mml:mo class="qopname">log</mml:mo><mml:mi>n</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula> since no optimization of <italic>W, C, P</italic> is required. This shows that F<sup>2</sup>-CommNet scales near-linearly with respect to the graph size <italic>n</italic>, and remains practical for large dynamic networks while still incorporating fractional dynamics and stability-aware monitoring.</p>
<p>Computational Complexity and Industrial Scalability. The near-linear scaling of F<sup>2</sup>-CommNet with respect to node size <italic>n</italic> and embedding dimension <italic>d</italic> is crucial in industrial contexts where graphs can contain millions of entities. By limiting the memory horizon <italic>H</italic> and the number of retained eigenmodes <italic>r</italic>&#x0226A;<italic>n</italic>, the framework ensures that training and inference remain tractable even for large-scale dynamic networks such as e-commerce transaction graphs, financial fraud monitoring, or communication networks. This scalability makes the method suitable for real-time or near-real-time deployment, where stability guarantees are essential to avoid spurious community alarms. Compared to baseline models, the fractional-Fourier design provides not only improved accuracy but also predictable resource usage, a key requirement in production environments.</p></sec></sec>
<sec>
<label>3.8</label>
<title>Summary of methodology</title>
<p>F<sup>2</sup>-CommNet amalgamates fractional-order neural dynamics, Fourier spectrum filtering, and Lyapunov-guided refinement into a cohesive stability-aware framework for dynamic community discovery. Fractional dynamics facilitate long-memory smoothing, whereas spectral filtering mitigates high-frequency modes susceptible to hallucinations. The resultant stability margin &#x003C1; and hallucination index &#x003B7;<sub><italic>k</italic></sub> offer comprehensible robustness assurances, while the entire pipeline exhibits near-linear scalability, facilitating implementation on extensive dynamic graphs.</p></sec></sec>
<sec id="s4">
<label>4</label>
<title>Experiments</title>
<p>This section presents a comprehensive evaluation of F<sup>2</sup>-CommNet. We aim to answer the following research questions:</p>
<list list-type="simple">
<list-item><p><bold>Q1</bold> Does F<sup>2</sup>-CommNet improve stability margins &#x003C1; and reduce hallucination indices &#x003B7;<sub><italic>k</italic></sub> compared to existing methods?</p></list-item>
<list-item><p><bold>Q2</bold> How does it perform on classical clustering metrics such as ARI, NMI, and modularity <italic>Q</italic>?</p></list-item>
<list-item><p><bold>Q3</bold> What is the contribution of each component (fractional dynamics, Fourier filtering, Lyapunov stability) in the overall framework?</p></list-item>
<list-item><p><bold>Q4</bold> How sensitive is the model to hyperparameters such as fractional order &#x003B1;, leakage coefficient <italic>c</italic><sub><italic>i</italic></sub>, embedding dimension, and window size?</p></list-item>
</list>
<sec>
<label>4.1</label>
<title>Datasets</title>
<p>To evaluate the effectiveness and robustness of F<sup>2</sup>-CommNet, we conduct experiments on a diverse set of real-world and synthetic dynamic networks. All datasets are preprocessed into temporal snapshots {<italic>G</italic><sub><italic>t</italic></sub>} with consistent node sets and evolving edge relations. Statistics are summarized in <xref ref-type="table" rid="T2">Table 2</xref>.</p>
<table-wrap position="float" id="T2">
<label>Table 2</label>
<caption><p>Statistics of datasets used in experiments.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Dataset</bold></th>
<th valign="top" align="center"><bold><italic>n</italic></bold></th>
<th valign="top" align="center"><bold><italic>n</italic><sub><italic>e</italic></sub></bold></th>
<th valign="top" align="center"><bold><italic>T</italic></bold></th>
<th valign="top" align="left"><bold>Domain</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Enron email (EN)</td>
<td valign="top" align="center">36,692</td>
<td valign="top" align="center">367,662</td>
<td valign="top" align="center">12</td>
<td valign="top" align="left">Communication</td>
</tr>
<tr>
<td valign="top" align="left">DBLP co-authorship</td>
<td valign="top" align="center">317,080</td>
<td valign="top" align="center">1,049,866</td>
<td valign="top" align="center">20</td>
<td valign="top" align="left">Collaboration</td>
</tr>
<tr>
<td valign="top" align="left">Cora citation (Cora-TS)</td>
<td valign="top" align="center">19,793</td>
<td valign="top" align="center">126,842</td>
<td valign="top" align="center">10</td>
<td valign="top" align="left">Citation</td>
</tr>
<tr>
<td valign="top" align="left">Reddit hyperlink</td>
<td valign="top" align="center">55,863</td>
<td valign="top" align="center">858,490</td>
<td valign="top" align="center">15</td>
<td valign="top" align="left">Social Media</td>
</tr>
<tr>
<td valign="top" align="left">UCI messages</td>
<td valign="top" align="center">1,899</td>
<td valign="top" align="center">59,835</td>
<td valign="top" align="center">22</td>
<td valign="top" align="left">Communication</td>
</tr>
<tr>
<td valign="top" align="left">Human PPI</td>
<td valign="top" align="center">3,852</td>
<td valign="top" align="center">76,584</td>
<td valign="top" align="center">8</td>
<td valign="top" align="left">Biological</td>
</tr>
<tr>
<td valign="top" align="left">Synthetic SBM (Syn-SBM)</td>
<td valign="top" align="center">10,000</td>
<td valign="top" align="center">80,000</td>
<td valign="top" align="center">10</td>
<td valign="top" align="left">Synthetic</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p><italic>n</italic>, number of nodes; <italic>n</italic><sub><italic>e</italic></sub>, number of edges; <italic>T</italic>, number of snapshots.</p>
</table-wrap-foot>
</table-wrap>
<p><bold>Enron Email Network (EN)</bold> (<xref ref-type="bibr" rid="B18">Kojaku et al., 2024</xref>): A communication dataset with <italic>n</italic> &#x0003D; 36,692 nodes and 367,662 edges, where nodes are employees and edges represent time-stamped email exchanges. Communities correspond to functional groups within the company.</p>
<p><bold>DBLP Co-authorship (DBLP)</bold> (<xref ref-type="bibr" rid="B10">Diboune et al., 2024</xref>): A co-authorship graph with <italic>n</italic> &#x0003D; 317,080 authors and 1,049,866 edges. Snapshots are constructed yearly, reflecting the evolution of research communities.</p>
<p><bold>Cora Citation Network (Cora-TS)</bold> (<xref ref-type="bibr" rid="B14">Hu et al., 2020</xref>): A citation graph adapted into temporal slices, with <italic>n</italic> &#x0003D; 19,793 articles and 126,842 citations. Node attributes are bag-of-words features; communities reflect scientific subfields.</p>
<p><bold>Reddit Hyperlink Network (Reddit)</bold> (<xref ref-type="bibr" rid="B15">Kaiser et al., 2023</xref>): A large-scale temporal network with <italic>n</italic> &#x0003D; 55,863 nodes and 858,490 edges, where nodes are subreddits and edges represent hyperlinks shared by users. Community structure aligns with topical categories.</p>
<p><bold>UCI Messages (UCI)</bold> (<xref ref-type="bibr" rid="B32">Prokop et al., 2024</xref>): A dynamic communication dataset with <italic>n</italic> &#x0003D; 1,899 nodes and 59,835 edges, representing private message exchanges on an online forum. Snapshots are segmented weekly to capture evolving social groups.</p>
<p><bold>Human Protein-Protein Interaction (PPI)</bold> (<xref ref-type="bibr" rid="B27">Oughtred et al., 2021</xref>): A biological network with <italic>n</italic> &#x0003D; 3,852 proteins and 76,584 interactions. Communities correspond to functional protein complexes, with dynamics reflecting newly discovered interactions.</p>
<p><bold>Synthetic Dynamic SBM (Syn-SBM)</bold> (<xref ref-type="bibr" rid="B31">Peixoto, 2019</xref>): A synthetic dynamic stochastic block model with <italic>n</italic> &#x0003D; 10,000 nodes and 4 evolving communities. To study stability and hallucination resistance under noise, we inject temporal perturbations by randomly rewiring a proportion <italic>p</italic> of edges per snapshot. We consider three noise levels <italic>p</italic> &#x02208; {0.02, 0.05, 0.10} (low, moderate, high). Unless otherwise specified, the main experiments use <italic>p</italic> &#x0003D; 0.05, while Section 4.12 evaluates robustness across all noise settings.</p>
<sec>
<label>4.2</label>
<title>Baselines</title>
<p>We evaluate F<sup>2</sup>-CommNet against a diverse set of baselines spanning static, spectral, temporal, and stability-aware approaches:</p>
<p><bold>Static GNNs:</bold> Graph Convolutional Network (GCN) (<xref ref-type="bibr" rid="B17">Kipf and Welling, 2017</xref>), Graph Attention Network (GAT) (<xref ref-type="bibr" rid="B38">Velickovic et al., 2018</xref>).</p>
<p><bold>Spectral methods:</bold> Spectral Clustering (SC) (<xref ref-type="bibr" rid="B36">Shah, 2022</xref>).</p>
<p><bold>Temporal GNNs:</bold> Temporal Graph Network (TGN) (<xref ref-type="bibr" rid="B34">Rossi et al., 2020</xref>), Dynamic Graph Convolutional Network (DyGCN) (<xref ref-type="bibr" rid="B24">Manessi et al., 2020</xref>).</p>
<p><bold>Stability-enhanced methods:</bold> EvolveGCN (<xref ref-type="bibr" rid="B29">Pareja et al., 2020</xref>).</p>
<p><bold>Proposed:</bold> F<sup>2</sup>-CommNet.</p>
<p><xref ref-type="table" rid="T3">Table 3</xref> summarizes the taxonomy of baseline methods considered in our experiments. We divide existing approaches into four main categories: (i) <bold>Static GNNs</bold> such as GCN and GAT, which capture spectral properties but lack temporal modeling and stability control; (ii) <bold>Spectral methods</bold> such as Spectral Clustering, which operate purely in the eigen-space of the Laplacian without temporal adaptation; (iii) <bold>Temporal GNNs</bold>, including TGN and DyGCN, which extend GNNs with dynamic node updates but still lack explicit hallucination suppression; and (iv) <bold>Stability-enhanced methods</bold> such as EvolveGCN, which introduce mechanisms to handle evolving graphs but without formal stability guarantees.</p>
<table-wrap position="float" id="T3">
<label>Table 3</label>
<caption><p>Taxonomy of baselines.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Model</bold></th>
<th valign="top" align="center"><bold>Temporal</bold></th>
<th valign="top" align="center"><bold>Spectral</bold></th>
<th valign="top" align="center"><bold>Attention</bold></th>
<th valign="top" align="center"><bold>Stability</bold></th>
<th valign="top" align="center"><bold>Hallucination control</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">GCN</td>
<td/>
<td valign="top" align="center">&#x02713;</td>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">GAT</td>
<td/>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">Spectral clustering</td>
<td/>
<td valign="top" align="center">&#x02713;</td>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">TGN</td>
<td valign="top" align="center">&#x02713;</td>
<td/>
<td valign="top" align="center">&#x02713;</td>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">DyGCN</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td/>
<td/>
<td/>
</tr>
<tr>
<td valign="top" align="left">EvolveGCN</td>
<td valign="top" align="center">&#x02713;</td>
<td/>
<td/>
<td valign="top" align="center">&#x02713;</td>
<td/>
</tr>
<tr>
<td valign="top" align="left">F<sup>2</sup>-CommNet</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>A &#x02713; indicates explicit support for the property.</p>
</table-wrap-foot>
</table-wrap>
<p>The proposed <bold>F</bold><sup><bold>2</bold></sup><bold>-CommNet</bold> unifies these perspectives by simultaneously supporting temporal modeling, spectral filtering, attention-based aggregation, Lyapunov-guided stability monitoring, and hallucination control. As shown in <xref ref-type="table" rid="T3">Table 3</xref>, it is the only method that explicitly checks all five properties, highlighting its principled design and broader coverage compared with existing baselines.</p>
<p>We also evaluated Transformer-style dynamic graph designs as possible baselines. Nonetheless, current graph Transformers are predominantly engineered for <italic>supervised</italic> temporal prediction tasks, including link prediction or node forecasting with time-stamped labels, and generally depend on quadratic self-attention methods. The two qualities render them unsuitable for our context, where (i) the aim is <italic>unsupervised</italic> structural community discovery instead of predictive accuracy, and (ii) extensive dynamic graphs necessitate near-linear scalability rather than attention-based <inline-formula><mml:math id="M52"><mml:mrow><mml:mi mathvariant="script">O</mml:mi></mml:mrow><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:msup><mml:mrow><mml:mi>n</mml:mi></mml:mrow><mml:mrow><mml:mn>2</mml:mn></mml:mrow></mml:msup></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:math></inline-formula> complexity. Modifying these transformer-based models for our label-free clustering task necessitates essential alterations to their architectures and training aims, resulting in indirect and sometimes inequitable comparisons. Consequently, adhering to known methodologies in dynamic community detection, we utilize GCN, GAT, SC, TGN, DyGCN, and EvolveGCN as the most representative and directly comparable benchmarks.</p>
<p>We also evaluated whether recent stability-enhanced GNNs, such as SO-GCN (<xref ref-type="bibr" rid="B8">Chen et al., 2025c</xref>), LDC-GAT (<xref ref-type="bibr" rid="B7">Chen et al., 2025b</xref>), and other constraint-based stability models could be included as baselines. However, these techniques are predominantly intended for semi-supervised node classification on static graphs and depend on label-driven losses, Jacobian-based regularization, or Lyapunov-style constraints, which are not applicable to our unsupervised dynamic community identification context. These limits also impose significant processing expense, rendering such models unworkable for the million-edge temporal graphs utilized in our research. Consequently, in alignment with existing practices in dynamic community detection, we see these stability-oriented systems as conceptually complementary rather than directly comparable baselines.</p>
<sec>
<label>4.2.1</label>
<title>Baseline configuration</title>
<p>For fair comparison, hyperparameters of baseline models are selected via grid search on the validation set to minimize loss. <xref ref-type="table" rid="T4">Table 4</xref> summarizes the final choices. For models without memory modules, the &#x0201C;Memory Size&#x0201D; field is not applicable (N/A).</p>
<table-wrap position="float" id="T4">
<label>Table 4</label>
<caption><p>Final hyperparameter configurations of baseline models after validation sweeps.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Model</bold></th>
<th valign="top" align="center"><bold>Hidden dim</bold></th>
<th valign="top" align="center"><bold>Learning rate</bold></th>
<th valign="top" align="center"><bold>Layers</bold></th>
<th valign="top" align="center"><bold>Dropout</bold></th>
<th valign="top" align="center"><bold>Memory size</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">GCN</td>
<td valign="top" align="center">64</td>
<td valign="top" align="center">1 &#x000D7; 10<sup>&#x02212;3</sup></td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0.1</td>
<td valign="top" align="center">N/A</td>
</tr>
<tr>
<td valign="top" align="left">GAT</td>
<td valign="top" align="center">64</td>
<td valign="top" align="center">1 &#x000D7; 10<sup>&#x02212;3</sup></td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0.1</td>
<td valign="top" align="center">N/A</td>
</tr>
<tr>
<td valign="top" align="left">Spectral clustering</td>
<td valign="top" align="center">N/A</td>
<td valign="top" align="center">N/A</td>
<td valign="top" align="center">N/A</td>
<td valign="top" align="center">N/A</td>
<td valign="top" align="center">N/A</td>
</tr>
<tr>
<td valign="top" align="left">TGN</td>
<td valign="top" align="center">128</td>
<td valign="top" align="center">1 &#x000D7; 10<sup>&#x02212;3</sup></td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0.1</td>
<td valign="top" align="center">200</td>
</tr>
<tr>
<td valign="top" align="left">DyGCN</td>
<td valign="top" align="center">128</td>
<td valign="top" align="center">1 &#x000D7; 10<sup>&#x02212;3</sup></td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0.1</td>
<td valign="top" align="center">N/A</td>
</tr>
<tr>
<td valign="top" align="left">EvolveGCN</td>
<td valign="top" align="center">128</td>
<td valign="top" align="center">1 &#x000D7; 10<sup>&#x02212;3</sup></td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0.1</td>
<td valign="top" align="center">N/A</td>
</tr>
<tr>
<td valign="top" align="left">F<sup>2</sup>-CommNet</td>
<td valign="top" align="center">64</td>
<td valign="top" align="center">1 &#x000D7; 10<sup>&#x02212;3</sup></td>
<td valign="top" align="center">2</td>
<td valign="top" align="center">0.1</td>
<td valign="top" align="center">N/A</td>
</tr></tbody>
</table>
</table-wrap></sec></sec>
<sec>
<label>4.3</label>
<title>Implementation details</title>
<p>All experiments are implemented in <monospace>PyTorch Geometric</monospace> and executed on a single NVIDIA RTX 3090 GPU with 24GB memory. The Adam optimizer is used with learning rate 10<sup>&#x02212;3</sup>, weight decay 10<sup>&#x02212;5</sup>, and embedding dimension <italic>d</italic> &#x0003D; 64. The batch size is fixed at 128, and each model is trained for 200 epochs. Early stopping with patience 20 epochs is applied to prevent overfitting. Spectral filtering uses <italic>r</italic> &#x0003D; 50 Lanczos-approximated eigenmodes.</p>
<p>For the fractional dynamics, the truncation window <italic>H</italic> is treated as a dataset-dependent hyperparameter. For each dataset, we conduct a small validation sweep over a candidate set (e.g., <italic>H</italic> &#x02208; {5, 10, 20}) and select the smallest value that yields stable training and strong validation modularity. The final choices are as follows: Enron Email (EN) and UCI Messages use <italic>H</italic> &#x0003D; 5; Human PPI, Cora-TS, and Reddit Hyperlink use <italic>H</italic> &#x0003D; 10; DBLP Co-authorship and Synthetic SBM use <italic>H</italic> &#x0003D; 20. These values remain fixed for all reported experiments to ensure full reproducibility.</p>
<p>Unless otherwise stated, all reported numbers are averaged over 10 independent runs with random seeds {0, 1, &#x02026;, 9}. For each dataset, method, and metric, we report the sample mean &#x003BC; and the corresponding 95% confidence interval &#x003BC;&#x000B1;&#x003B4;, where <inline-formula><mml:math id="M53"><mml:mi>&#x003B4;</mml:mi><mml:mo>=</mml:mo><mml:msub><mml:mrow><mml:mi>t</mml:mi></mml:mrow><mml:mrow><mml:mn>0</mml:mn><mml:mo>.</mml:mo><mml:mn>975</mml:mn><mml:mo>,</mml:mo><mml:mn>9</mml:mn></mml:mrow></mml:msub><mml:mi>&#x003C3;</mml:mi><mml:mo>/</mml:mo><mml:msqrt><mml:mrow><mml:mn>10</mml:mn></mml:mrow></mml:msqrt></mml:math></inline-formula> and &#x003C3; is the sample standard deviation. This unified statistical protocol ensures a fair and robust comparison across all experiments.</p></sec>
<sec>
<label>4.4</label>
<title>Large-scale experiments on Reddit and DBLP</title>
<p>We evaluate F<sup>2</sup>-CommNet on the two largest datasets in our benchmark suite: <bold>Reddit</bold> and <bold>DBLP</bold>. To clarify methodological differences, the baselines are grouped into: (i) static GNNs (GCN, GAT), which do not model temporal evolution, and (ii) dynamic GNNs (DyGCN, EvolveGCN), which adapt to evolving graph structures. All results follow the unified statistical protocol described in Section 4.3, and are reported as mean &#x000B1; 95% confidence intervals over 10 runs. As shown in <xref ref-type="table" rid="T5">Table 5</xref>, F<sup>2</sup>-CommNet achieves the highest ARI on both benchmarks, improving upon static baselines by 20&#x02013;25% and upon the strongest dynamic baseline (EvolveGCN) by 10&#x02013;15%. Moreover, the confidence intervals of F<sup>2</sup>-CommNet are significantly narrower, indicating reduced sensitivity to initialization and greater robustness on large-scale dynamic graphs.</p>
<table-wrap position="float" id="T5">
<label>Table 5</label>
<caption><p>Performance on Reddit and DBLP (mean &#x000B1; 95% CI over 10 runs).</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Method</bold></th>
<th valign="top" align="center"><bold>Reddit (ARI)</bold></th>
<th valign="top" align="center"><bold>DBLP (ARI)</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">GCN</td>
<td valign="top" align="center">0.38 &#x000B1; 0.04</td>
<td valign="top" align="center">0.42 &#x000B1; 0.03</td>
</tr>
<tr>
<td valign="top" align="left">GAT</td>
<td valign="top" align="center">0.41 &#x000B1; 0.03</td>
<td valign="top" align="center">0.46 &#x000B1; 0.04</td>
</tr>
<tr>
<td valign="top" align="left">DyGCN</td>
<td valign="top" align="center">0.49 &#x000B1; 0.02</td>
<td valign="top" align="center">0.52 &#x000B1; 0.03</td>
</tr>
<tr>
<td valign="top" align="left">EvolveGCN</td>
<td valign="top" align="center">0.53 &#x000B1; 0.02</td>
<td valign="top" align="center">0.56 &#x000B1; 0.02</td>
</tr>
<tr>
<td valign="top" align="left">F<sup>2</sup>-CommNet</td>
<td valign="top" align="center"><bold>0.64 &#x000B1; 0.01</bold></td>
<td valign="top" align="center"><bold>0.69 &#x000B1; 0.02</bold></td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Bold values indicate the best performance (highest ARI) for each dataset.</p>
</table-wrap-foot>
</table-wrap>
<sec>
<label>4.4.1</label>
<title>Fractional dynamics</title>
<p>The Caputo fractional derivative is approximated via the Gr&#x000FC;nwald&#x02013;Letnikov discretization, which requires convolving each update with a truncated history of length <italic>H</italic>. We vary the fractional order &#x003B1; &#x02208; {0.6, 0.7, 0.8, 0.9, 1.0} to investigate the role of long-memory effects. The case &#x003B1; &#x0003D; 1.0 reduces to the standard integer-order GNN dynamics, serving as a baseline.</p></sec>
<sec>
<label>4.4.2</label>
<title>Stability and hallucination regularization</title>
<p>To enforce robustness, two stability-aware regularizers are incorporated into the objective:</p>
<disp-formula id="EQ21"><mml:math id="M54"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mrow><mml:mi mathvariant="script">L</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>&#x003C1;</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mo>-</mml:mo><mml:mi>&#x003C1;</mml:mi><mml:mo>,</mml:mo></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:msub><mml:mrow><mml:mrow><mml:mi mathvariant="script">L</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>&#x003B7;</mml:mi></mml:mrow></mml:msub><mml:mo>=</mml:mo><mml:mstyle displaystyle="true"><mml:munder class="msub"><mml:mrow><mml:mo>&#x02211;</mml:mo></mml:mrow><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:munder></mml:mstyle><mml:mo class="qopname">max</mml:mo><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mn>0</mml:mn><mml:mo>,</mml:mo><mml:msub><mml:mrow><mml:mi>&#x003B7;</mml:mi></mml:mrow><mml:mrow><mml:mi>k</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(21)</label></disp-formula>
<p>where &#x003C1; denotes the Lyapunov stability margin and &#x003B7;<sub><italic>k</italic></sub> is the hallucination index of eigenmode <italic>u</italic><sub><italic>k</italic></sub>. The total training objective is defined as</p>
<disp-formula id="EQ22"><mml:math id="M55"><mml:mtable class="eqnarray" columnalign="left"><mml:mtr><mml:mtd><mml:mrow><mml:mi mathvariant="script">L</mml:mi></mml:mrow><mml:mo>=</mml:mo><mml:msub><mml:mrow><mml:mrow><mml:mi mathvariant="script">L</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mtext class="textrm" mathvariant="normal">recon</mml:mtext></mml:mrow></mml:msub><mml:mo>&#x0002B;</mml:mo><mml:msub><mml:mrow><mml:mtext>&#x003BB;</mml:mtext></mml:mrow><mml:mrow><mml:mi>&#x003C1;</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mrow><mml:mrow><mml:mi mathvariant="script">L</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>&#x003C1;</mml:mi></mml:mrow></mml:msub><mml:mo>&#x0002B;</mml:mo><mml:msub><mml:mrow><mml:mtext>&#x003BB;</mml:mtext></mml:mrow><mml:mrow><mml:mi>&#x003B7;</mml:mi></mml:mrow></mml:msub><mml:msub><mml:mrow><mml:mrow><mml:mi mathvariant="script">L</mml:mi></mml:mrow></mml:mrow><mml:mrow><mml:mi>&#x003B7;</mml:mi></mml:mrow></mml:msub><mml:mo>,</mml:mo></mml:mtd></mml:mtr></mml:mtable></mml:math><label>(22)</label></disp-formula>
<p>with &#x003BB;<sub>&#x003C1;</sub> and &#x003BB;<sub>&#x003B7;</sub> balancing reconstruction fidelity against stability guarantees. For all datasets, &#x003BB;<sub>&#x003C1;</sub> and &#x003BB;<sub>&#x003B7;</sub> are tuned in {0.1, 0.5, 1.0} using a validation split. Spectral filtering employs <italic>r</italic> &#x0003D; 50 leading eigenmodes by default, approximated using the Lanczos method for scalability.</p>
<p>All experiments follow the unified statistical protocol described in Section 4.3, and each configuration is evaluated over 10 independent runs with distinct random seeds.</p></sec></sec>
<sec>
<label>4.5</label>
<title>Result analysis summary</title>
<p>From the comprehensive results in <xref ref-type="table" rid="T6">Table 6</xref>, several consistent patterns emerge across all seven benchmark datasets (Cora, Citeseer, PubMed, Reddit, Enron, DBLP, BioGRID).</p>
<table-wrap position="float" id="T6">
<label>Table 6</label>
<caption><p>Stability and clustering performance on seven datasets.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Dataset</bold></th>
<th valign="top" align="center"><bold>Metric</bold></th>
<th valign="top" align="center"><bold>GCN</bold></th>
<th valign="top" align="center"><bold>GAT</bold></th>
<th valign="top" align="center"><bold>Spectral</bold></th>
<th valign="top" align="center"><bold>TGN</bold></th>
<th valign="top" align="center"><bold>DyGCN</bold></th>
<th valign="top" align="center"><bold>EvolveGCN</bold></th>
<th valign="top" align="center"><bold>F<sup>2</sup>-CommNet</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Cora</td>
<td valign="top" align="center">&#x003C1;&#x02191;</td>
<td valign="top" align="center">0.05 &#x000B1; 0.02</td>
<td valign="top" align="center">0.07 &#x000B1; 0.03</td>
<td valign="top" align="center">0.00 &#x000B1; 0.00</td>
<td valign="top" align="center">0.09 &#x000B1; 0.02</td>
<td valign="top" align="center">0.11 &#x000B1; 0.01</td>
<td valign="top" align="center">0.13 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.21 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">&#x003B7;<sub>max</sub>&#x02193;</td>
<td valign="top" align="center">0.42 &#x000B1; 0.04</td>
<td valign="top" align="center">0.39 &#x000B1; 0.05</td>
<td valign="top" align="center">0.51 &#x000B1; 0.00</td>
<td valign="top" align="center">0.35 &#x000B1; 0.03</td>
<td valign="top" align="center">0.33 &#x000B1; 0.02</td>
<td valign="top" align="center">0.30 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.28 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">ARI&#x02191;</td>
<td valign="top" align="center">0.68 &#x000B1; 0.03</td>
<td valign="top" align="center">0.70 &#x000B1; 0.05</td>
<td valign="top" align="center">0.62 &#x000B1; 0.00</td>
<td valign="top" align="center">0.72 &#x000B1; 0.02</td>
<td valign="top" align="center">0.74 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.75 &#x000B1; 0.02</bold></td>
<td valign="top" align="center">0.73 &#x000B1; 0.01</td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">NMI&#x02191;</td>
<td valign="top" align="center">0.71 &#x000B1; 0.02</td>
<td valign="top" align="center">0.74 &#x000B1; 0.04</td>
<td valign="top" align="center">0.65 &#x000B1; 0.00</td>
<td valign="top" align="center">0.76 &#x000B1; 0.02</td>
<td valign="top" align="center">0.77 &#x000B1; 0.02</td>
<td valign="top" align="center">0.79 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.83 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center"><italic>Q&#x02191;</italic></td>
<td valign="top" align="center">0.44 &#x000B1; 0.02</td>
<td valign="top" align="center">0.47 &#x000B1; 0.03</td>
<td valign="top" align="center">0.42 &#x000B1; 0.00</td>
<td valign="top" align="center">0.49 &#x000B1; 0.02</td>
<td valign="top" align="center">0.51 &#x000B1; 0.01</td>
<td valign="top" align="center">0.52 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.57 &#x000B1; 0.01</bold></td>
</tr>
<tr>
<td valign="top" align="left">Citeseer</td>
<td valign="top" align="center">&#x003C1;&#x02191;</td>
<td valign="top" align="center">0.04 &#x000B1; 0.02</td>
<td valign="top" align="center">0.06 &#x000B1; 0.03</td>
<td valign="top" align="center">0.00 &#x000B1; 0.00</td>
<td valign="top" align="center">0.08 &#x000B1; 0.02</td>
<td valign="top" align="center">0.10 &#x000B1; 0.02</td>
<td valign="top" align="center">0.12 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.19 &#x000B1; 0.02</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">&#x003B7;<sub>max</sub>&#x02193;</td>
<td valign="top" align="center">0.45 &#x000B1; 0.05</td>
<td valign="top" align="center">0.41 &#x000B1; 0.04</td>
<td valign="top" align="center">0.49 &#x000B1; 0.00</td>
<td valign="top" align="center">0.36 &#x000B1; 0.03</td>
<td valign="top" align="center">0.34 &#x000B1; 0.02</td>
<td valign="top" align="center">0.31 &#x000B1; 0.03</td>
<td valign="top" align="center"><bold>0.29 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">ARI&#x02191;</td>
<td valign="top" align="center">0.62 &#x000B1; 0.04</td>
<td valign="top" align="center">0.65 &#x000B1; 0.05</td>
<td valign="top" align="center">0.59 &#x000B1; 0.00</td>
<td valign="top" align="center">0.68 &#x000B1; 0.03</td>
<td valign="top" align="center">0.70 &#x000B1; 0.02</td>
<td valign="top" align="center">0.72 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.78 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">NMI&#x02191;</td>
<td valign="top" align="center">0.67 &#x000B1; 0.03</td>
<td valign="top" align="center">0.70 &#x000B1; 0.04</td>
<td valign="top" align="center">0.61 &#x000B1; 0.00</td>
<td valign="top" align="center">0.72 &#x000B1; 0.03</td>
<td valign="top" align="center">0.74 &#x000B1; 0.02</td>
<td valign="top" align="center">0.75 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.81 &#x000B1; 0.02</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center"><italic>Q&#x02191;</italic></td>
<td valign="top" align="center">0.40 &#x000B1; 0.03</td>
<td valign="top" align="center">0.43 &#x000B1; 0.04</td>
<td valign="top" align="center">0.39 &#x000B1; 0.00</td>
<td valign="top" align="center">0.45 &#x000B1; 0.02</td>
<td valign="top" align="center">0.47 &#x000B1; 0.02</td>
<td valign="top" align="center">0.48 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.55 &#x000B1; 0.01</bold></td>
</tr>
<tr>
<td valign="top" align="left">PubMed</td>
<td valign="top" align="center">&#x003C1;&#x02191;</td>
<td valign="top" align="center">0.06 &#x000B1; 0.01</td>
<td valign="top" align="center">0.08 &#x000B1; 0.02</td>
<td valign="top" align="center">0.00 &#x000B1; 0.00</td>
<td valign="top" align="center">0.11 &#x000B1; 0.01</td>
<td valign="top" align="center">0.12 &#x000B1; 0.01</td>
<td valign="top" align="center">0.14 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.23 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">&#x003B7;<sub>max</sub>&#x02193;</td>
<td valign="top" align="center">0.39 &#x000B1; 0.03</td>
<td valign="top" align="center">0.36 &#x000B1; 0.03</td>
<td valign="top" align="center">0.47 &#x000B1; 0.00</td>
<td valign="top" align="center">0.32 &#x000B1; 0.02</td>
<td valign="top" align="center">0.30 &#x000B1; 0.02</td>
<td valign="top" align="center">0.28 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.25 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">ARI&#x02191;</td>
<td valign="top" align="center">0.66 &#x000B1; 0.02</td>
<td valign="top" align="center">0.69 &#x000B1; 0.03</td>
<td valign="top" align="center">0.60 &#x000B1; 0.00</td>
<td valign="top" align="center">0.71 &#x000B1; 0.02</td>
<td valign="top" align="center">0.73 &#x000B1; 0.01</td>
<td valign="top" align="center">0.74 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.78 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">NMI&#x02191;</td>
<td valign="top" align="center">0.70 &#x000B1; 0.02</td>
<td valign="top" align="center">0.73 &#x000B1; 0.03</td>
<td valign="top" align="center">0.63 &#x000B1; 0.00</td>
<td valign="top" align="center">0.75 &#x000B1; 0.01</td>
<td valign="top" align="center">0.77 &#x000B1; 0.01</td>
<td valign="top" align="center">0.78 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.84 &#x000B1; 0.00</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center"><italic>Q&#x02191;</italic></td>
<td valign="top" align="center">0.42 &#x000B1; 0.02</td>
<td valign="top" align="center">0.45 &#x000B1; 0.02</td>
<td valign="top" align="center">0.40 &#x000B1; 0.00</td>
<td valign="top" align="center">0.47 &#x000B1; 0.01</td>
<td valign="top" align="center">0.49 &#x000B1; 0.01</td>
<td valign="top" align="center">0.50 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.59 &#x000B1; 0.01</bold></td>
</tr>
<tr>
<td valign="top" align="left">Reddit</td>
<td valign="top" align="center">&#x003C1;&#x02191;</td>
<td valign="top" align="center">0.07 &#x000B1; 0.01</td>
<td valign="top" align="center">0.09 &#x000B1; 0.01</td>
<td valign="top" align="center">0.00 &#x000B1; 0.00</td>
<td valign="top" align="center">0.12 &#x000B1; 0.00</td>
<td valign="top" align="center">0.14 &#x000B1; 0.01</td>
<td valign="top" align="center">0.15 &#x000B1; 0.00</td>
<td valign="top" align="center"><bold>0.17 &#x000B1; 0.00</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">&#x003B7;<sub>max</sub>&#x02193;</td>
<td valign="top" align="center">0.41 &#x000B1; 0.01</td>
<td valign="top" align="center">0.38 &#x000B1; 0.01</td>
<td valign="top" align="center">0.50 &#x000B1; 0.00</td>
<td valign="top" align="center">0.34 &#x000B1; 0.01</td>
<td valign="top" align="center">0.32 &#x000B1; 0.00</td>
<td valign="top" align="center">0.29 &#x000B1; 0.00</td>
<td valign="top" align="center"><bold>0.20 &#x000B1; 0.00</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">ARI&#x02191;</td>
<td valign="top" align="center">0.64 &#x000B1; 0.01</td>
<td valign="top" align="center">0.67 &#x000B1; 0.02</td>
<td valign="top" align="center">0.58 &#x000B1; 0.00</td>
<td valign="top" align="center">0.70 &#x000B1; 0.01</td>
<td valign="top" align="center">0.72 &#x000B1; 0.01</td>
<td valign="top" align="center">0.73 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.82 &#x000B1; 0.00</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">NMI&#x02191;</td>
<td valign="top" align="center">0.69 &#x000B1; 0.01</td>
<td valign="top" align="center">0.72 &#x000B1; 0.02</td>
<td valign="top" align="center">0.61 &#x000B1; 0.00</td>
<td valign="top" align="center">0.74 &#x000B1; 0.01</td>
<td valign="top" align="center">0.76 &#x000B1; 0.01</td>
<td valign="top" align="center">0.77 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.85 &#x000B1; 0.00</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center"><italic>Q&#x02191;</italic></td>
<td valign="top" align="center">0.41 &#x000B1; 0.01</td>
<td valign="top" align="center">0.44 &#x000B1; 0.01</td>
<td valign="top" align="center">0.38 &#x000B1; 0.00</td>
<td valign="top" align="center">0.46 &#x000B1; 0.00</td>
<td valign="top" align="center">0.48 &#x000B1; 0.01</td>
<td valign="top" align="center">0.49 &#x000B1; 0.00</td>
<td valign="top" align="center"><bold>0.58 &#x000B1; 0.00</bold></td>
</tr>
<tr>
<td valign="top" align="left">Enron</td>
<td valign="top" align="center">&#x003C1;&#x02191;</td>
<td valign="top" align="center">0.05 &#x000B1; 0.02</td>
<td valign="top" align="center">0.07 &#x000B1; 0.03</td>
<td valign="top" align="center">0.00 &#x000B1; 0.00</td>
<td valign="top" align="center">0.09 &#x000B1; 0.02</td>
<td valign="top" align="center">0.11 &#x000B1; 0.02</td>
<td valign="top" align="center">0.12 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.22 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">&#x003B7;<sub>max</sub>&#x02193;</td>
<td valign="top" align="center">0.44 &#x000B1; 0.04</td>
<td valign="top" align="center">0.40 &#x000B1; 0.04</td>
<td valign="top" align="center">0.52 &#x000B1; 0.00</td>
<td valign="top" align="center">0.37 &#x000B1; 0.03</td>
<td valign="top" align="center">0.35 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.33 &#x000B1; 0.02</bold></td>
<td valign="top" align="center">0.34 &#x000B1; 0.02</td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">ARI&#x02191;</td>
<td valign="top" align="center">0.60 &#x000B1; 0.03</td>
<td valign="top" align="center">0.63 &#x000B1; 0.05</td>
<td valign="top" align="center">0.57 &#x000B1; 0.00</td>
<td valign="top" align="center">0.66 &#x000B1; 0.03</td>
<td valign="top" align="center">0.68 &#x000B1; 0.02</td>
<td valign="top" align="center">0.69 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.74 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">NMI&#x02191;</td>
<td valign="top" align="center">0.65 &#x000B1; 0.03</td>
<td valign="top" align="center">0.68 &#x000B1; 0.04</td>
<td valign="top" align="center">0.60 &#x000B1; 0.00</td>
<td valign="top" align="center">0.70 &#x000B1; 0.03</td>
<td valign="top" align="center">0.72 &#x000B1; 0.02</td>
<td valign="top" align="center">0.73 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.82 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center"><italic>Q&#x02191;</italic></td>
<td valign="top" align="center">0.39 &#x000B1; 0.02</td>
<td valign="top" align="center">0.42 &#x000B1; 0.03</td>
<td valign="top" align="center">0.37 &#x000B1; 0.00</td>
<td valign="top" align="center">0.44 &#x000B1; 0.02</td>
<td valign="top" align="center">0.46 &#x000B1; 0.02</td>
<td valign="top" align="center">0.47 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.54 &#x000B1; 0.01</bold></td>
</tr>
<tr>
<td valign="top" align="left">DBLP</td>
<td valign="top" align="center">&#x003C1;&#x02191;</td>
<td valign="top" align="center">0.06 &#x000B1; 0.01</td>
<td valign="top" align="center">0.08 &#x000B1; 0.02</td>
<td valign="top" align="center">0.00 &#x000B1; 0.00</td>
<td valign="top" align="center">0.10 &#x000B1; 0.01</td>
<td valign="top" align="center">0.12 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.13 &#x000B1; 0.01</bold></td>
<td valign="top" align="center">0.10 &#x000B1; 0.01</td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">&#x003B7;<sub>max</sub>&#x02193;</td>
<td valign="top" align="center">0.40 &#x000B1; 0.02</td>
<td valign="top" align="center">0.37 &#x000B1; 0.03</td>
<td valign="top" align="center">0.48 &#x000B1; 0.00</td>
<td valign="top" align="center">0.34 &#x000B1; 0.02</td>
<td valign="top" align="center">0.32 &#x000B1; 0.01</td>
<td valign="top" align="center">0.30 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.26 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">ARI&#x02191;</td>
<td valign="top" align="center">0.65 &#x000B1; 0.02</td>
<td valign="top" align="center">0.68 &#x000B1; 0.03</td>
<td valign="top" align="center">0.60 &#x000B1; 0.00</td>
<td valign="top" align="center">0.71 &#x000B1; 0.02</td>
<td valign="top" align="center">0.73 &#x000B1; 0.02</td>
<td valign="top" align="center">0.74 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.81 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">NMI&#x02191;</td>
<td valign="top" align="center">0.69 &#x000B1; 0.02</td>
<td valign="top" align="center">0.72 &#x000B1; 0.03</td>
<td valign="top" align="center">0.62 &#x000B1; 0.00</td>
<td valign="top" align="center">0.74 &#x000B1; 0.02</td>
<td valign="top" align="center">0.76 &#x000B1; 0.02</td>
<td valign="top" align="center">0.77 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.84 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center"><italic>Q&#x02191;</italic></td>
<td valign="top" align="center">0.41 &#x000B1; 0.01</td>
<td valign="top" align="center">0.44 &#x000B1; 0.02</td>
<td valign="top" align="center">0.39 &#x000B1; 0.00</td>
<td valign="top" align="center">0.46 &#x000B1; 0.01</td>
<td valign="top" align="center">0.48 &#x000B1; 0.01</td>
<td valign="top" align="center">0.49 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.53 &#x000B1; 0.01</bold></td>
</tr>
<tr>
<td valign="top" align="left">BioGRID</td>
<td valign="top" align="center">&#x003C1;&#x02191;</td>
<td valign="top" align="center">0.05 &#x000B1; 0.02</td>
<td valign="top" align="center">0.07 &#x000B1; 0.02</td>
<td valign="top" align="center">0.00 &#x000B1; 0.00</td>
<td valign="top" align="center">0.09 &#x000B1; 0.02</td>
<td valign="top" align="center">0.11 &#x000B1; 0.02</td>
<td valign="top" align="center">0.13 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.16 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">&#x003B7;<sub>max</sub>&#x02193;</td>
<td valign="top" align="center">0.43 &#x000B1; 0.04</td>
<td valign="top" align="center">0.40 &#x000B1; 0.05</td>
<td valign="top" align="center">0.51 &#x000B1; 0.00</td>
<td valign="top" align="center">0.36 &#x000B1; 0.03</td>
<td valign="top" align="center">0.34 &#x000B1; 0.03</td>
<td valign="top" align="center">0.31 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.25 &#x000B1; 0.02</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">ARI&#x02191;</td>
<td valign="top" align="center">0.61 &#x000B1; 0.03</td>
<td valign="top" align="center">0.64 &#x000B1; 0.04</td>
<td valign="top" align="center">0.58 &#x000B1; 0.00</td>
<td valign="top" align="center">0.67 &#x000B1; 0.03</td>
<td valign="top" align="center">0.69 &#x000B1; 0.03</td>
<td valign="top" align="center">0.70 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.79 &#x000B1; 0.02</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center">NMI&#x02191;</td>
<td valign="top" align="center">0.66 &#x000B1; 0.03</td>
<td valign="top" align="center">0.69 &#x000B1; 0.04</td>
<td valign="top" align="center">0.61 &#x000B1; 0.00</td>
<td valign="top" align="center">0.71 &#x000B1; 0.03</td>
<td valign="top" align="center">0.73 &#x000B1; 0.02</td>
<td valign="top" align="center">0.74 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.83 &#x000B1; 0.01</bold></td>
</tr>
 <tr>
<td/>
<td valign="top" align="center"><italic>Q&#x02191;</italic></td>
<td valign="top" align="center">0.40 &#x000B1; 0.02</td>
<td valign="top" align="center">0.43 &#x000B1; 0.03</td>
<td valign="top" align="center">0.38 &#x000B1; 0.00</td>
<td valign="top" align="center">0.45 &#x000B1; 0.02</td>
<td valign="top" align="center">0.47 &#x000B1; 0.02</td>
<td valign="top" align="center">0.48 &#x000B1; 0.02</td>
<td valign="top" align="center"><bold>0.57 &#x000B1; 0.01</bold></td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Higher &#x003C1;, ARI, NMI, and <italic>Q</italic> indicate better performance; lower &#x003B7;<sub>max</sub> indicates better stability. Bold values denote the best result per row. All GNN-based methods report mean &#x000B1; 95% confidence intervals over 10 runs. To maintain a unified table format, spectral clustering&#x02014;which is deterministic in our implementation and yields identical results across runs&#x02014;is reported as &#x000B1;0.00.</p>
</table-wrap-foot>
</table-wrap>
<p><bold>(i) Stability improvement</bold>. F<sup>2</sup>-CommNet achieves the highest stability margin &#x003C1; on six of the seven datasets, with average gains of more than 2 &#x000D7; compared to GCN, GAT, and spectral clustering, and clear relative improvements over the strongest temporal baselines such as TGN, DyGCN, and EvolveGCN; DBLP is the sole exception, where EvolveGCN attains a slightly larger margin. This confirms the effectiveness of fractional dynamics and Lyapunov-guided monitoring in enforcing robust equilibrium during dynamic community evolution.</p>
<p><bold>(ii) Hallucination suppression</bold>. The hallucination index &#x003B7;<sub>max</sub> is substantially reduced by F<sup>2</sup>-CommNet, reaching values of 0.20&#x02013;0.34 across datasets&#x02014;the lowest among all methods on six of the seven benchmarks&#x02014;compared with 0.29&#x02013;0.52 for competing methods. Notably, on Reddit and BioGRID the reduction relative to GCN and spectral clustering exceeds 40%, showing that Fourier spectral filtering effectively suppresses unstable high-frequency modes responsible for noisy communities.</p>
<p><bold>(iii) Clustering quality enhancement</bold>. The stability and robustness improvements translate directly into superior clustering outcomes. F<sup>2</sup>-CommNet obtains the best Adjusted Rand Index (ARI), Normalized Mutual Information (NMI), and modularity <italic>Q</italic> in nearly every case, with gains of 5&#x02013;10% over GCN/GAT and 3&#x02013;6% over temporal models like TGN and EvolveGCN. For example, on Citeseer the ARI improves from 0.72 (EvolveGCN) to 0.78, and on Reddit the NMI improves from 0.77 (EvolveGCN) to 0.85.</p>
<p><bold>Overall</bold>. These findings demonstrate that F<sup>2</sup>-CommNet achieves a balanced and principled advancement in <bold>stability</bold>, <bold>hallucination suppression</bold>, and <bold>clustering quality</bold>, providing a robust and generalizable framework for dynamic community detection across diverse domains.</p>
<p><xref ref-type="table" rid="T7">Table 7</xref> summarizes the metric-wise wins of F<sup>2</sup>-CommNet across seven benchmark datasets. We count victories over five evaluation criteria: stability margin &#x003C1;, hallucination index &#x003B7;<sub>max</sub>, Adjusted Rand Index (ARI), Normalized Mutual Information (NMI), and modularity <italic>Q</italic>. As shown, F<sup>2</sup>-CommNet consistently dominates: it secures the best &#x003C1; on six of the seven datasets (all except DBLP), reduces &#x003B7;<sub>max</sub> to the lowest levels on six of the seven datasets (all except Enron), and achieves the highest ARI, NMI, and <italic>Q</italic> in nearly all cases. In total, the model wins 32 out of 35 possible comparisons, demonstrating its robustness across diverse graph domains.</p>
<table-wrap position="float" id="T7">
<label>Table 7</label>
<caption><p>Count of metrics (&#x003C1;, &#x003B7;<sub>max</sub>, ARI, NMI, <italic>Q</italic>) on which F<sup>2</sup>-CommNet is best for each dataset.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Dataset</bold></th>
<th valign="top" align="center"><bold><italic>&#x003C1;&#x02191;</italic></bold></th>
<th valign="top" align="center"><bold>&#x003B7;<sub>max</sub>&#x02193;</bold></th>
<th valign="top" align="center"><bold>ARI &#x02191;</bold></th>
<th valign="top" align="center"><bold>NMI &#x02191;</bold></th>
<th valign="top" align="center"><bold><italic>Q&#x02191;</italic></bold></th>
<th valign="top" align="center"><bold>Wins/5</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Cora</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td/>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">4</td>
</tr>
<tr>
<td valign="top" align="left">Citeseer</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">5</td>
</tr>
<tr>
<td valign="top" align="left">PubMed</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">5</td>
</tr>
<tr>
<td valign="top" align="left">Reddit</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">5</td>
</tr>
<tr>
<td valign="top" align="left">Enron</td>
<td valign="top" align="center">&#x02713;</td>
<td/>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">4</td>
</tr>
<tr>
<td valign="top" align="left">DBLP</td>
<td/>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">4</td>
</tr>
<tr>
<td valign="top" align="left">BioGRID</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">&#x02713;</td>
<td valign="top" align="center">5</td>
</tr>
<tr>
<td valign="top" align="left">Total wins</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">6</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">7</td>
<td valign="top" align="center">32/35</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Here, &#x02191; indicates higher is better, &#x02193; indicates lower is better. A &#x02713; denotes that F<sup>2</sup>-CommNet achieves the best score for that metric. Wins are computed based on the mean performance from the 10-run evaluations.</p>
</table-wrap-foot>
</table-wrap>
<p>This result highlights that the integration of fractional dynamics, spectral filtering, and stability-aware regularization not only stabilizes training but also directly translates into superior clustering quality. The strong performance across heterogeneous datasets such as citation networks (Cora, Citeseer, PubMed), social networks (Reddit, DBLP), and biological graphs (BioGRID) confirms the generalizability of F<sup>2</sup>-CommNet.</p>
<p><bold>Key findings</bold>. (i) F<sup>2</sup>-CommNet enlarges &#x003C1; by roughly 2&#x02013;4 &#x000D7; compared to GCN/GAT on most datasets. (ii) The hallucination index &#x003B7;<sub>max</sub> is reduced to 0.20&#x02013;0.34, the lowest among all compared methods on six of the seven datasets. (iii) These stability gains translate into better clustering quality.</p>
<p><xref ref-type="fig" rid="F2">Figure 2</xref> shows training curves of modularity and stability margin &#x003C1;, confirming that F<sup>2</sup>-CommNet converges faster and to more stable solutions.</p>
<fig position="float" id="F2">
<label>Figure 2</label>
<caption><p>Training curves on Cora: <bold>(a)</bold> modularity <italic>Q</italic>, <bold>(b)</bold> stability margin &#x003C1;.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0002.tif">
<alt-text content-type="machine-generated">Graphs showing training curves on the Cora dataset for seven methods: GCN, GAT, Spectral, TGN, DyGCN, EvolveGCN, and F&#x000B2;-CommNet. The left graph depicts modularity Q over 100 epochs, with F&#x000B2;-CommNet showing the highest increase. The right graph shows stability margin &#x003C1;, also over 100 epochs, again led by F&#x000B2;-CommNet. Each method is distinctly colored.</alt-text>
</graphic>
</fig>
<p>Qualitative results in <xref ref-type="fig" rid="F3">Figure 3</xref> visualize learned communities, showing that F<sup>2</sup>-CommNet yields cleaner and more compact clusters.</p>
<fig position="float" id="F3">
<label>Figure 3</label>
<caption><p>Visualization of community detection results across seven representative methods on synthetic data. Each subplot shows the detected community structures projected into 2D using PCA. Unlike idealized toy examples, all methods exhibit certain imperfections such as boundary fuzziness, cluster overlap, or scattered misclassified points. Compared to the baselines, our proposed F<sup>2</sup>-CommNet produces more compact and well-separated communities, though not perfectly, reflecting a realistic advantage in stability and robustness without exaggerating performance.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0003.tif">
<alt-text content-type="machine-generated">Comparison of community detection across seven methods displayed in scatter plots. Each method shows clustered data points in different colors representing detected communities. Top row methods: GCN, GAT, Spectral, and TGN. Bottom row: DyGCN, EvolveGCN, and F&#x000B2;-CommNet. Clusters vary in density and separation, illustrating the effectiveness of each method.</alt-text>
</graphic>
</fig></sec>
<sec>
<label>4.6</label>
<title>Ablation studies</title>
<p>We evaluate five variants:</p>
<list list-type="bullet">
<list-item><p>Baseline (&#x003B1; &#x0003D; 1.0): integer-order dynamics only.</p></list-item>
<list-item><p>&#x0002B; Fourier Projection.</p></list-item>
<list-item><p>&#x0002B; Fractional Damping.</p></list-item>
<list-item><p>&#x0002B; Lyapunov Stability.</p></list-item>
<list-item><p>Full F<sup>2</sup>-CommNet.</p></list-item>
</list>
<p>To evaluate the generality of each architectural component, we compare the five variants across three typical datasets: citation (Cora), social (Reddit), and biological networks (BioGRID).</p>
<p><xref ref-type="table" rid="T8">Table 8</xref> summarizes the cross-dataset ablation results. We focus on the stability margin &#x003C1; and the hallucination index &#x003B7;<sub>max</sub>, as they directly reflect the stabilization effect of each architectural component.</p>
<table-wrap position="float" id="T8">
<label>Table 8</label>
<caption><p>Cross-dataset ablation study on stability metrics (mean &#x000B1; 95% CI over 10 runs).</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Dataset</bold></th>
<th valign="top" align="center"><bold>Baseline</bold></th>
<th valign="top" align="center"><bold>&#x0002B;Fourier</bold></th>
<th valign="top" align="center"><bold>&#x0002B;Frac</bold></th>
<th valign="top" align="center"><bold>&#x0002B;Lyap</bold></th>
<th valign="top" align="center"><bold>Full</bold></th>
</tr>
</thead>
<tbody>
<tr style="background-color:#dee1e1;">
<td valign="top" align="left" colspan="6"><bold>Stability margin</bold> &#x003C1; <bold>(higher is better)</bold></td>
</tr>
<tr>
<td valign="top" align="left">Cora</td>
<td valign="top" align="center">0.12 &#x000B1; 0.01</td>
<td valign="top" align="center">0.21 &#x000B1; 0.01</td>
<td valign="top" align="center">0.28 &#x000B1; 0.01</td>
<td valign="top" align="center">0.30 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.31</bold> <bold>&#x000B1;</bold> <bold>0.01</bold></td>
</tr>
<tr>
<td valign="top" align="left">Reddit</td>
<td valign="top" align="center">0.09 &#x000B1; 0.01</td>
<td valign="top" align="center">0.14 &#x000B1; 0.01</td>
<td valign="top" align="center">0.16 &#x000B1; 0.01</td>
<td valign="top" align="center">0.17 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.18</bold> <bold>&#x000B1;</bold> <bold>0.01</bold></td>
</tr>
<tr>
<td valign="top" align="left">BioGRID</td>
<td valign="top" align="center">0.07 &#x000B1; 0.01</td>
<td valign="top" align="center">0.11 &#x000B1; 0.01</td>
<td valign="top" align="center">0.14 &#x000B1; 0.01</td>
<td valign="top" align="center">0.15 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.16</bold> <bold>&#x000B1;</bold> <bold>0.01</bold></td>
</tr>
<tr style="background-color:#dee1e1;">
<td valign="top" align="left" colspan="6"><bold>Hallucination index</bold> &#x003B7;<sub>max</sub> <bold>(lower is better)</bold></td>
</tr>
<tr>
<td valign="top" align="left">Cora</td>
<td valign="top" align="center">0.31 &#x000B1; 0.02</td>
<td valign="top" align="center">0.20 &#x000B1; 0.01</td>
<td valign="top" align="center">0.12 &#x000B1; 0.01</td>
<td valign="top" align="center">0.10 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.08</bold> <bold>&#x000B1;</bold> <bold>0.01</bold></td>
</tr>
<tr>
<td valign="top" align="left">Reddit</td>
<td valign="top" align="center">0.38 &#x000B1; 0.02</td>
<td valign="top" align="center">0.29 &#x000B1; 0.01</td>
<td valign="top" align="center">0.24 &#x000B1; 0.01</td>
<td valign="top" align="center">0.22 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.20</bold> <bold>&#x000B1;</bold> <bold>0.01</bold></td>
</tr>
<tr>
<td valign="top" align="left">BioGRID</td>
<td valign="top" align="center">0.40 &#x000B1; 0.02</td>
<td valign="top" align="center">0.31 &#x000B1; 0.02</td>
<td valign="top" align="center">0.27 &#x000B1; 0.01</td>
<td valign="top" align="center">0.26 &#x000B1; 0.01</td>
<td valign="top" align="center"><bold>0.25</bold> <bold>&#x000B1;</bold> <bold>0.01</bold></td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>We report only stability-related metrics (&#x003C1; and &#x003B7;<sub>max</sub>). Bold values indicate the best performance for each dataset (highest stability margin &#x003C1; and lowest hallucination index &#x003B7;<sub>max</sub>).</p>
</table-wrap-foot>
</table-wrap>
<p>In all three datasets, each module consistently enhances both the stability margin and the suppression of hallucinations. Fractional damping yields the largest individual benefit, while the Lyapunov stability term further tightens the confidence intervals. The complete F<sup>2</sup>-CommNet attains the best performance across all domains, indicating that the stabilizing mechanisms generalize beyond an individual dataset.</p></sec>
<sec>
<label>4.7</label>
<title>Sensitivity analysis</title>
<p>We analyze the sensitivity of F<sup>2</sup>-CommNet to fractional order &#x003B1;, leakage <italic>c</italic><sub><italic>i</italic></sub>, embedding dimension <italic>d</italic>, and window size <italic>w</italic>.</p>
<p><bold>Training dynamics and sensitivity analysis</bold>.</p>
<p><xref ref-type="fig" rid="F4">Figure 4</xref> provides a joint view of training behaviors and parameter sensitivity. In <xref ref-type="fig" rid="F4">Figure 4</xref>, we compare the modularity <italic>Q</italic> and stability margin &#x003C1; across seven representative methods. Classical baselines such as GCN and Spectral clustering show slower convergence and weaker stability, while more advanced temporal models (DyGCN and EvolveGCN) demonstrate improved robustness. Our proposed F<sup>2</sup>-CommNet consistently achieves higher <italic>Q</italic> and larger &#x003C1;, validating both community quality and stability guarantees. We further analyze the role of the fractional order &#x003B1;. We observe that &#x003B1; &#x02208; [0.7, 0.9] yields the most balanced performance: smaller &#x003B1; enlarges the stability margin but slows down convergence due to excessive memory effects, whereas larger &#x003B1; accelerates convergence but weakens robustness, reflected by an increase in &#x003B7;<sub>max</sub>. These results empirically support the theoretical trade-off derived in <xref ref-type="disp-formula" rid="EQ22">Equation 22</xref> and highlight the importance of selecting moderate fractional orders in practice.</p>
<fig position="float" id="F4">
<label>Figure 4</label>
<caption><p>Training dynamics and sensitivity analysis on the Cora dataset, illustrating the effect of the fractional order &#x003B1; on the stability margin &#x003C1; and the hallucination index &#x003B7;<sub>max</sub>, as well as their evolution during training.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0004.tif">
<alt-text content-type="machine-generated">Line graphs show the effect of the fractional order alpha on the stability margin rho and the hallucination index eta max. The top graph plots rho and eta max against alpha, showing diverging trends. The bottom left graph presents rho over training epochs for different alpha values. The bottom right graph displays eta max over epochs, also for varying alpha. Different line colors represent different alpha values (e.g., 0.8 and 1.0).</alt-text>
</graphic>
</fig></sec>
<sec>
<label>4.8</label>
<title>Parameter sensitivity and fractional stability analysis</title>
<p>We further investigate how key architectural and fractional-order parameters influence model stability and clustering performance. <xref ref-type="table" rid="T8">Table 8</xref> summarizes the ablation study, showing that each fractional component contributes positively to the stability margin &#x003C1; and to the suppression of the hallucination index &#x003B7;<sub>max</sub>. The progressive inclusion of Fourier projection, fractional damping, and Lyapunov stability terms leads to a monotonic improvement, with the full F<sup>2</sup>-CommNet achieving the highest average performance on both metrics. This indicates that the combination of fractional dynamics and Lyapunov-based correction yields a synergistic stabilization effect rather than a simple additive gain.</p>
<p><xref ref-type="table" rid="T9">Table 9</xref> examines the impact of the fractional order &#x003B1; on both stability and hallucination suppression. As &#x003B1; decreases from 1.0 to 0.5, the stability margin &#x003C1; gradually increases while the hallucination index &#x003B7;<sub>max</sub> decreases, reflecting a stronger damping of unstable eigenmodes. This behavior confirms that the fractional operator serves as a spectral regulator&#x02014;suppressing noisy high-frequency responses while preserving coherent community structures. Notably, &#x003B1;&#x02248;0.7 provides a desirable trade-off between responsiveness and smoothness, consistent with the optimal setting adopted in our experiments.</p>
<table-wrap position="float" id="T9">
<label>Table 9</label>
<caption><p>Fractional order sweep: stability margin &#x003C1; and hallucination index &#x003B7;<sub>max</sub> for different &#x003B1;.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>&#x003B1;</bold></th>
<th valign="top" align="center"><bold><italic>&#x003C1;&#x02191;</italic></bold></th>
<th valign="top" align="center"><bold>&#x003B7;<sub>max</sub>&#x02193;</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">0.5</td>
<td valign="top" align="center">0.30 &#x000B1; 0.01</td>
<td valign="top" align="center">0.25 &#x000B1; 0.01</td>
</tr>
<tr>
<td valign="top" align="left">0.6</td>
<td valign="top" align="center">0.27 &#x000B1; 0.01</td>
<td valign="top" align="center">0.28 &#x000B1; 0.02</td>
</tr>
<tr>
<td valign="top" align="left">0.7</td>
<td valign="top" align="center">0.23 &#x000B1; 0.01</td>
<td valign="top" align="center">0.31 &#x000B1; 0.02</td>
</tr>
<tr>
<td valign="top" align="left">0.8</td>
<td valign="top" align="center">0.19 &#x000B1; 0.01</td>
<td valign="top" align="center">0.35 &#x000B1; 0.02</td>
</tr>
<tr>
<td valign="top" align="left">0.9</td>
<td valign="top" align="center">0.15 &#x000B1; 0.01</td>
<td valign="top" align="center">0.40 &#x000B1; 0.02</td>
</tr>
<tr>
<td valign="top" align="left">1.0</td>
<td valign="top" align="center">0.10 &#x000B1; 0.01</td>
<td valign="top" align="center">0.50 &#x000B1; 0.02</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>All results are reported as mean &#x000B1; 95% CI over 10 runs.</p>
</table-wrap-foot>
</table-wrap>
<p><xref ref-type="table" rid="T10">Table 10</xref> presents the detailed eigenmode analysis of the hallucination indices &#x003B7;<sub><italic>k</italic></sub> under &#x003B1; &#x0003D; 1.0 and &#x003B1; &#x0003D; 0.7. Compared with the integer-order case, the fractional configuration compresses the dynamic range of &#x003B7;<sub><italic>k</italic></sub> values, effectively reducing extreme oscillations at higher Laplacian frequencies (<italic>k</italic>&#x0003E;6). This spectral contraction explains the observed increase in temporal consistency across snapshots and validates the fractional damping mechanism described in <xref ref-type="disp-formula" rid="EQ22">Equation 22</xref>.</p>
<table-wrap position="float" id="T10">
<label>Table 10</label>
<caption><p>Spectral mode suppression: hallucination indices &#x003B7;<sub><italic>k</italic></sub> under &#x003B1; &#x0003D; 1.0 and &#x003B1; &#x0003D; 0.7.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Eigenmode <italic>k</italic></bold></th>
<th valign="top" align="center"><bold>1</bold></th>
<th valign="top" align="center"><bold>2</bold></th>
<th valign="top" align="center"><bold>3</bold></th>
<th valign="top" align="center"><bold>4</bold></th>
<th valign="top" align="center"><bold>5</bold></th>
<th valign="top" align="center"><bold>6</bold></th>
<th valign="top" align="center"><bold>7</bold></th>
<th valign="top" align="center"><bold>8</bold></th>
<th valign="top" align="center"><bold>9</bold></th>
<th valign="top" align="center"><bold>10</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">&#x003B7;<sub><italic>k</italic></sub>(&#x003B1; &#x0003D; 1.0)</td>
<td valign="top" align="center">&#x02013;0.3</td>
<td valign="top" align="center">&#x02013;0.1</td>
<td valign="top" align="center">0.0</td>
<td valign="top" align="center">0.2</td>
<td valign="top" align="center">0.5</td>
<td valign="top" align="center">0.8</td>
<td valign="top" align="center">1.0</td>
<td valign="top" align="center">1.3</td>
<td valign="top" align="center">1.6</td>
<td valign="top" align="center">1.9</td>
</tr>
<tr>
<td valign="top" align="left">&#x003B7;<sub><italic>k</italic></sub>(&#x003B1; &#x0003D; 0.7)</td>
<td valign="top" align="center">&#x02013;0.5</td>
<td valign="top" align="center">&#x02013;0.3</td>
<td valign="top" align="center">&#x02013;0.2</td>
<td valign="top" align="center">0.0</td>
<td valign="top" align="center">0.2</td>
<td valign="top" align="center">0.4</td>
<td valign="top" align="center">0.6</td>
<td valign="top" align="center">0.7</td>
<td valign="top" align="center">0.9</td>
<td valign="top" align="center">1.0</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Values are averaged over 10 runs; CI is omitted because &#x003B7;<sub><italic>k</italic></sub> is computed deterministically from the learned dynamics for each run and shows negligible variance across seeds.</p>
</table-wrap-foot>
</table-wrap>
<p><bold>Effect of leakage</bold> <bold><italic>c</italic><sub><italic>i</italic></sub></bold>. We additionally analyzed the leakage coefficient <italic>c</italic><sub><italic>i</italic></sub>, which regulates the inherent damping intensity in the fractional dynamics. Increasing <italic>c</italic><sub><italic>i</italic></sub> enhances the Lyapunov stability margin &#x003C1; and expedites the attenuation of disturbances; nevertheless, excessive leakage may over-damp node activations, resulting in unduly smoothed embeddings and diminished community contrast. Conversely, small values of <italic>c</italic><sub><italic>i</italic></sub> diminish the effective damping, rendering the system more susceptible to noise, which subsequently elevates the hallucination index &#x003B7;<sub>max</sub> and results in fragmented communities. In our experiments, we probed a moderate range of <italic>c</italic><sub><italic>i</italic></sub> values and observed that performance (in terms of &#x003C1;, &#x003B7;<sub>max</sub>, ARI, and <italic>Q</italic>) remains stable within a band of <italic>c</italic><sub><italic>i</italic></sub> &#x02208; [0.2, 0.4]. We therefore fix <italic>c</italic><sub><italic>i</italic></sub> in this range for all reported results, which provides a robust trade-off between stability and representation strength.</p>
<p><bold>Effect of embedding dimension</bold> <bold><italic>d</italic></bold>. Performance improves with increasing feature dimension up to <italic>d</italic> &#x0003D; 128, beyond which overfitting emerges, suggesting that excessively large latent spaces capture noise rather than informative structural variations.</p>
<p><bold>Effect of window size</bold> <bold><italic>w</italic></bold>. A larger temporal window captures longer dependencies but increases computational overhead. Empirically, <italic>w</italic> &#x0003D; 64 offers a satisfactory balance between temporal expressiveness and efficiency, providing stable training and consistent community alignment across dynamic snapshots.</p></sec>
<sec>
<label>4.9</label>
<title>Spectral mode suppression analysis</title>
<p>We further analyze the suppression of high-frequency Laplacian eigenmodes. <xref ref-type="table" rid="T10">Table 10</xref> compares hallucination indices &#x003B7;<sub><italic>k</italic></sub> &#x0003D; &#x003BB;<sub><italic>k</italic></sub><italic>F</italic>&#x02212;<italic>c</italic><sub><italic>k</italic></sub> under integer-order (&#x003B1; &#x0003D; 1.0) vs. fractional-order (&#x003B1; &#x0003D; 0.7). The results confirm that fractional dynamics suppress unstable high-frequency modes, consistent with the theoretical model. The theoretical derivation in <xref ref-type="disp-formula" rid="EQ22">Equation 22</xref> suggests that fractional damping reduces the effective forcing term &#x003BB;<sub><italic>k</italic></sub><italic>F</italic>, thereby shifting certain mid-frequency modes into the stable region.</p>
<p>Fractional-order dynamics thus provide a natural spectral regularization mechanism. Unlike integer-order propagation, which tends to amplify noise residing in higher Laplacian eigenmodes, the fractional operator introduces a smooth decay governed by &#x003B1;, effectively attenuating oscillatory perturbations and stabilizing graph filters. This behavior leads to smaller hallucination indices &#x003B7;<sub><italic>k</italic></sub> and smoother temporal transitions across successive snapshots. Empirically, the suppression effect becomes more evident as &#x003B1; decreases, demonstrating that fractional damping not only mitigates over-smoothing but also prevents spectral drift caused by transient noise. Consequently, the fractional component can be interpreted as an adaptive low-pass filter that preserves informative structures while restraining unstable eigenmodes. This motivates the following analysis of spectral hallucination and stability margins.</p></sec>
<sec>
<label>4.10</label>
<title>Error dynamics under perturbations</title>
<p>We next study error trajectories under different noise intensities, based on the error dynamics formulation (<xref ref-type="disp-formula" rid="EQ11">Equations 11</xref>&#x02013;<xref ref-type="disp-formula" rid="EQ15">15</xref>). As shown in <xref ref-type="table" rid="T11">Table 11</xref>, fractional dynamics consistently achieve tighter error bounds limsup<sub><italic>t</italic> &#x02192; &#x0221E;</sub>||<italic>e</italic>(<italic>t</italic>)||, in line with the boundedness theorem (<xref ref-type="disp-formula" rid="EQ21">Equation 21</xref>).</p>
<table-wrap position="float" id="T11">
<label>Table 11</label>
<caption><p>Error dynamics under perturbations: long-term error bound limsup<sub><italic>t</italic> &#x02192; &#x0221E;</sub>||<italic>e</italic>(<italic>t</italic>)||.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Noise Level</bold></th>
<th valign="top" align="center"><bold>&#x003B1; &#x0003D; 1.0 (Integer)</bold></th>
<th valign="top" align="center"><bold>&#x003B1; &#x0003D; 0.7 (Fractional)</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">&#x003C3; &#x0003D; 0.01</td>
<td valign="top" align="center">0.05</td>
<td valign="top" align="center">0.02</td>
</tr>
<tr>
<td valign="top" align="left">&#x003C3; &#x0003D; 0.05</td>
<td valign="top" align="center">0.12</td>
<td valign="top" align="center">0.06</td>
</tr>
<tr>
<td valign="top" align="left">&#x003C3; &#x0003D; 0.10</td>
<td valign="top" align="center">0.20</td>
<td valign="top" align="center">0.11</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Values are averaged over 10 simulation runs; confidence intervals are omitted because the variance across runs is negligible.</p>
</table-wrap-foot>
</table-wrap></sec>
<sec>
<label>4.11</label>
<title>Fractional Lyapunov function validation</title>
<p>Finally, we validate Lyapunov convergence by monitoring <italic>V</italic>(<italic>t</italic>) &#x0003D; <italic>e</italic><sup>&#x022A4;</sup><italic>Pe</italic>. <xref ref-type="table" rid="T12">Table 12</xref> demonstrates that &#x003B1; &#x0003C; 1 accelerates the decay of <italic>V</italic>(<italic>t</italic>), achieving faster stability, consistent with the sufficient conditions in <xref ref-type="disp-formula" rid="EQ20">Equations 20</xref>, <xref ref-type="disp-formula" rid="EQ21">21</xref>.</p>
<table-wrap position="float" id="T12">
<label>Table 12</label>
<caption><p>Lyapunov function decay: values of <italic>V</italic>(<italic>t</italic>) at different time points.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Time <italic>t</italic></bold></th>
<th valign="top" align="center"><bold>&#x003B1; &#x0003D; 1.0</bold></th>
<th valign="top" align="center"><bold>&#x003B1; &#x0003D; 0.7</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">0</td>
<td valign="top" align="center">1.00</td>
<td valign="top" align="center">1.00</td>
</tr>
<tr>
<td valign="top" align="left">5</td>
<td valign="top" align="center">0.61</td>
<td valign="top" align="center">0.45</td>
</tr>
<tr>
<td valign="top" align="left">10</td>
<td valign="top" align="center">0.37</td>
<td valign="top" align="center">0.20</td>
</tr>
<tr>
<td valign="top" align="left">15</td>
<td valign="top" align="center">0.22</td>
<td valign="top" align="center">0.10</td>
</tr>
<tr>
<td valign="top" align="left">20</td>
<td valign="top" align="center">0.13</td>
<td valign="top" align="center">0.05</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Each entry is obtained by averaging 10 trajectories with different random perturbations; the variance is very small, so we omit confidence intervals for clarity.</p>
</table-wrap-foot>
</table-wrap></sec>
<sec>
<label>4.12</label>
<title>Simulation studies</title>
<p>To complement the main experiments, we further evaluate the robustness of F<sup>2</sup>-CommNet under controlled noise conditions using the synthetic dynamic SBM described in Section 4.1. As noted previously, the dataset includes three perturbation levels <italic>p</italic> &#x02208; {0.02, 0.05, 0.10}, corresponding to low, moderate, and high noise. This section examines the model&#x00027;s stability and hallucination suppression behavior across these noise regimes.</p>
<p>To validate the theoretical framework of F<sup>2</sup>-CommNet, we perform a hierarchy of simulations, ranging from toy graphs to synthetic networks and real-world benchmarks. This staged design illustrates how fractional dynamics, Fourier spectral filtering, and Lyapunov-based analysis jointly contribute to stability enhancement and hallucination suppression.</p>
<p>The Laplacian eigenvalues of the 10-vertex synthetic graph are</p>
<disp-formula id="E23"><mml:math id="M56"><mml:mrow><mml:mtext>&#x003BB;</mml:mtext><mml:mrow><mml:mo stretchy="false">(</mml:mo><mml:mrow><mml:mi>L</mml:mi></mml:mrow><mml:mo stretchy="false">)</mml:mo></mml:mrow><mml:mo>&#x02248;</mml:mo><mml:mrow><mml:mo stretchy="false">{</mml:mo><mml:mrow><mml:mn>0</mml:mn><mml:mo>.</mml:mo><mml:mn>00</mml:mn><mml:mo>,</mml:mo><mml:mn>1</mml:mn><mml:mo>.</mml:mo><mml:mn>27</mml:mn><mml:mo>,</mml:mo><mml:mn>2</mml:mn><mml:mo>.</mml:mo><mml:mn>15</mml:mn><mml:mo>,</mml:mo><mml:mn>3</mml:mn><mml:mo>.</mml:mo><mml:mn>62</mml:mn><mml:mo>,</mml:mo><mml:mn>4</mml:mn><mml:mo>.</mml:mo><mml:mn>10</mml:mn><mml:mo>,</mml:mo><mml:mn>5</mml:mn><mml:mo>.</mml:mo><mml:mn>48</mml:mn><mml:mo>,</mml:mo><mml:mn>6</mml:mn><mml:mo>.</mml:mo><mml:mn>33</mml:mn><mml:mo>,</mml:mo><mml:mn>7</mml:mn><mml:mo>.</mml:mo><mml:mn>89</mml:mn><mml:mo>,</mml:mo><mml:mn>9</mml:mn><mml:mo>.</mml:mo><mml:mn>05</mml:mn><mml:mo>,</mml:mo><mml:mn>11</mml:mn><mml:mo>.</mml:mo><mml:mn>22</mml:mn></mml:mrow><mml:mo stretchy="false">}</mml:mo></mml:mrow><mml:mo>,</mml:mo></mml:mrow></mml:math></disp-formula>
<p>revealing a rich spectral structure. The smallest eigenvalue &#x003BB;<sub>1</sub> &#x0003D; 0 corresponds to the trivial constant mode, mid-range modes (e.g., &#x003BB;<sub>3</sub>, &#x003BB;<sub>4</sub>) encode coarse community partitions, while the largest eigenvalues (&#x003BB;<sub>9</sub>, &#x003BB;<sub>10</sub>) correspond to highly oscillatory modes that dominate hallucination channels. As shown in Section 3.8, decreasing the fractional order &#x003B1; suppresses such unstable modes, enlarging the stability margin &#x003C1; and reducing the hallucination index &#x003B7;<sub>max</sub>.</p>
<p><bold>Experiment 1: Baseline Integer Dynamics</bold>.</p>
<p>Integer-order dynamics (&#x003B1; &#x0003D; 1.0) follow classical exponential decay. As illustrated in <xref ref-type="fig" rid="F5">Figure 5</xref>, integer-order dynamics (&#x003B1; &#x0003D; 1.0) demonstrate exponential decay. However, high-frequency eigenmodes remain unstable, amplifying oscillations and destabilizing node trajectories. Although partial suppression occurs in low-frequency modes, the lack of robustness in high-frequency channels highlights the inherent limitations of classical integer-order updates, motivating the introduction of fractional damping.</p>
<fig position="float" id="F5">
<label>Figure 5</label>
<caption><p>Time evolution of vertex states <italic>v</italic><sub>1</sub>&#x02013;<italic>v</italic><sub>10</sub> under different settings. Blue curves represent clean integer-order dynamics (&#x003B1; &#x0003D; 1.0), red dashed curves denote noisy integer-order dynamics, and green curves show noisy fractional-order dynamics (&#x003B1; &#x0003D; 0.8). Fractional damping suppresses oscillations and confines unstable modes, consistent with the suppression mechanism discussed in Section 3.9.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0005.tif">
<alt-text content-type="machine-generated">Nine line graphs arranged in a three-by-three grid show vertex dynamics over time from zero to thirty. Each graph displays three lines: a blue line for clean dynamics, a red dashed line for noisy dynamics with alpha equal to one point zero, and a green line for noisy dynamics with alpha equal to zero point eight. The y-axis values range from negative zero point five to zero point five. Each graph is labeled from &#x0201C;Vertex v1 dynamics&#x0201D; to &#x0201C;Vertex v10 dynamics.&#x0201D; </alt-text>
</graphic>
</fig>
<p><bold>Experiment 2: Fractional Damping</bold>.</p>
<p>When governed by fractional order &#x003B1; &#x0003D; 0.8, the system exhibits long-memory smoothing. As illustrated in <xref ref-type="fig" rid="F6">Figure 6</xref>, the Mittag&#x02013;Leffler decay suppresses oscillations and enforces stable convergence, even under moderate perturbations. Compared with integer-order dynamics, fractional damping converges more slowly at first but achieves greater long-term robustness. This matches the theoretical claim that fractional updates redistribute dissipation across time, thereby suppressing hallucination-prone modes.</p>
<fig position="float" id="F6">
<label>Figure 6</label>
<caption><p>Heatmap comparison of vertex dynamics across time. <bold>(Left)</bold> clean integer-order dynamics (&#x003B1; &#x0003D; 1.0); <bold>(Middle)</bold> noisy integer-order dynamics amplifying instabilities; <bold>(Right)</bold> noisy fractional-order dynamics (&#x003B1; &#x0003D; 0.8) where oscillations are confined to bounded ranges. Fractional dynamics reshape the spectral stability landscape and mitigate hallucination-prone modes.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0006.tif">
<alt-text content-type="machine-generated">Three heatmaps show state values over time with varying noise levels. The first is labeled &#x0201C;Clean &#x003B1;=1.0,&#x0201D; the second &#x0201C;Noisy &#x003B1;=1.0,&#x0201D; and the third &#x0201C;Noisy &#x003B1;=0.8.&#x0201D; Color bars range from about -0.2 to 0.6 (first two) and -0.4 to 0.8 (third), indicating different state values. The x-axis is time, and the y-axis is vertex index.</alt-text>
</graphic>
</fig>
<p><bold>Experiment 3: Parameter Sweep</bold>.</p>
<p>We sweep &#x003B1; &#x02208; [0.5, 1.0] to quantify robustness. As shown in <xref ref-type="table" rid="T9">Table 9</xref> and <xref ref-type="fig" rid="F7">Figure 7</xref>, smaller &#x003B1; consistently enlarges &#x003C1; and reduces &#x003B7;<sub>max</sub>, though convergence slows for &#x003B1; &#x02264; 0.6. The range &#x003B1; &#x02208; (0.7, 0.9) offers the best trade-off between speed and stability, matching the theoretical condition in <xref ref-type="disp-formula" rid="EQ22">Equation 22</xref>.</p>
<fig position="float" id="F7">
<label>Figure 7</label>
<caption><p>Baseline integer-order dynamics (&#x003B1; &#x0003D; 1.0) on the Cora dataset. The system follows exponential decay, but high-frequency eigenmodes remain unstable, leading to amplified oscillations and destabilized trajectories. While low-frequency components exhibit suppression, the persistence of unstable modes highlights the fragility of integer-order updates.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0007.tif">
<alt-text content-type="machine-generated">Graph titled &#x0201C;Experiment 1: Baseline Integer Dynamics (&#x003B1; = 1.0)&#x0201D; showing four mode amplitudes over time. Blue line represents low-frequency mode (stable), green for mid-frequency (partly stable), red dashed for high-frequency (unstable), and yellow dotted for divergent mode. Time on x-axis, amplitude on y-axis.</alt-text>
</graphic>
</fig>
<p><bold>Experiment 4: Perturbation Analysis</bold>.</p>
<p>We next test robustness under explicit edge perturbations (&#x00394;<italic>w</italic><sub>14</sub> &#x0003D; 0.5, &#x00394;<italic>w</italic><sub>25</sub> &#x0003D; 0.8, &#x00394;<italic>w</italic><sub>36</sub> &#x0003D; 1.0). Integer-order dynamics amplify noise via unstable high-frequency modes, while fractional-order dynamics confine oscillations to bounded trajectories (<xref ref-type="fig" rid="F8">Figure 8</xref>). <xref ref-type="table" rid="T13">Table 13</xref> quantifies this effect, demonstrating that fractional damping reduces &#x003B7;<sub>max</sub> and enlarges &#x003C1;, consistent with the Lyapunov boundedness theorem (<xref ref-type="disp-formula" rid="EQ21">Equation 21</xref>).</p>
<fig position="float" id="F8">
<label>Figure 8</label>
<caption><p>Error dynamics under perturbations. Integer-order dynamics amplify oscillations and diverge, whereas fractional dynamics confine trajectories within bounded ranges.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0008.tif">
<alt-text content-type="machine-generated">Line graph titled &#x0201C;Error dynamics under perturbations&#x0201D; compares integer-order (&#x003B1;=1.0) and fractional-order (&#x003B1;=0.8) trajectories over time. The red line for integer-order fluctuates significantly, peaking at 2.0, while the blue line for fractional-order shows milder oscillations. Time is on the x-axis and trajectory amplitude on the y-axis, ranging from -1.0 to 2.0.</alt-text>
</graphic>
</fig>
<table-wrap position="float" id="T13">
<label>Table 13</label>
<caption><p>Perturbation analysis on the Cora dataset.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Method</bold></th>
<th valign="top" align="center"><bold>&#x003B7;<sub>max</sub>&#x02193;</bold></th>
<th valign="top" align="center"><bold><italic>&#x003C1;&#x02191;</italic></bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Integer-order (&#x003B1; &#x0003D; 1.0)</td>
<td valign="top" align="center">0.47</td>
<td valign="top" align="center">0.08</td>
</tr>
<tr>
<td valign="top" align="left">Fractional-order (&#x003B1; &#x0003D; 0.8)</td>
<td valign="top" align="center"><bold>0.29</bold></td>
<td valign="top" align="center"><bold>0.20</bold></td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Fractional damping (&#x003B1; &#x0003D; 0.8) suppresses noise growth. All reported values are means over 10 perturbation trials; confidence intervals are omitted because the spread is minimal and does not affect the qualitative conclusions.</p>
</table-wrap-foot>
</table-wrap>
<p><bold>Experiment 5: Spectral Hallucination Indices (Sections 3.10, 3.11)</bold>.</p>
<p>Finally, we evaluate hallucination indices at the spectral level. <xref ref-type="table" rid="T14">Table 14</xref> shows that fractional damping (&#x003B1; &#x0003D; 0.8) selectively stabilizes mid-frequency modes, shifting Mode 3 from unstable to stable. High-frequency modes remain unstable but with reduced growth, consistent with the bounded dynamics observed in Experiment 4. <xref ref-type="fig" rid="F9">Figure 9</xref> confirms that the Lyapunov function <italic>V</italic>(<italic>t</italic>) decays monotonically under fractional updates, validating the theoretical stability guarantees.</p>
<table-wrap position="float" id="T14">
<label>Table 14</label>
<caption><p>Spectral hallucination analysis on the Cora dataset.</p></caption>
<table frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left"><bold>Mode <italic>k</italic></bold></th>
<th valign="top" align="center"><bold>Eigenvalue &#x003BB;<sub><italic>k</italic></sub></bold></th>
<th valign="top" align="center"><bold>Stability (&#x003B1; &#x0003D; 1.0)</bold></th>
<th valign="top" align="center"><bold>Stability (&#x003B1; &#x0003D; 0.8)</bold></th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Low-freq (1&#x02013;2)</td>
<td valign="top" align="center">0.0&#x02013;2.9</td>
<td valign="top" align="center">stable</td>
<td valign="top" align="center">more stable</td>
</tr>
<tr>
<td valign="top" align="left">Mid-freq (3&#x02013;5)</td>
<td valign="top" align="center">3.0&#x02013;5.5</td>
<td valign="top" align="center">partly unstable</td>
<td valign="top" align="center">stabilized</td>
</tr>
<tr>
<td valign="top" align="left">High-freq (6&#x02013;10)</td>
<td valign="top" align="center">5.6&#x02013;11.2</td>
<td valign="top" align="center">unstable</td>
<td valign="top" align="center">unstable (reduced growth)</td>
</tr>
<tr>
<td valign="top" align="left">&#x003B7;<sub>max</sub></td>
<td valign="top" align="center">&#x02013;</td>
<td valign="top" align="center">0.42</td>
<td valign="top" align="center"><bold>0.28</bold> (&#x02193;)</td>
</tr>
<tr>
<td valign="top" align="left">&#x003C1; margin</td>
<td valign="top" align="center">&#x02013;</td>
<td valign="top" align="center">0.07</td>
<td valign="top" align="center"><bold>0.21</bold> (&#x02191;)</td>
</tr></tbody>
</table>
<table-wrap-foot>
<p>Fractional damping (&#x003B1; &#x0003D; 0.8) stabilizes mid-frequency modes and reduces &#x003B7;<sub>max</sub>. Mode-wise stability labels and summary statistics are derived from 10 independent runs; since the qualitative pattern is identical across seeds, we omit confidence intervals for brevity.</p>
</table-wrap-foot>
</table-wrap>
<fig position="float" id="F9">
<label>Figure 9</label>
<caption><p>Lyapunov function decay <italic>V</italic>(<italic>t</italic>) under integer-order (&#x003B1; &#x0003D; 1.0) and fractional-order (&#x003B1; &#x0003D; 0.8) dynamics. Fractional damping ensures smoother convergence and tighter stability bounds.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0009.tif">
<alt-text content-type="machine-generated">Graph depicting Lyapunov function decay over time. The red line represents integer-order Lyapunov V(t) with oscillations, while the blue line depicts fractional-order Lyapunov V(t) with smoother decay. Both lines converge toward zero. Time ranges from 0 to 10 on the x-axis, and V(t) from 0 to 1 on the y-axis.</alt-text>
</graphic>
</fig>
<sec>
<label>4.12.1</label>
<title>Summary of simulation results</title>
<p>Across all five experiments, three consistent findings emerge:</p>
<list list-type="bullet">
<list-item><p>High-frequency eigenmodes are the primary catalysts of hallucinations, driving unstable oscillations.</p></list-item>
<list-item><p>Fractional damping selectively stabilizes mid-frequency modes, confining noise to bounded ranges and reducing &#x003B7;<sub>max</sub>.</p></list-item>
<list-item><p>The optimal range &#x003B1; &#x02208; (0.7, 0.9) balances convergence speed with robustness, maximizing stability margin &#x003C1; while suppressing hallucinations.</p></list-item>
</list></sec></sec>
<sec>
<label>4.13</label>
<title>Qualitative results on real datasets</title>
<p>To complement the quantitative evaluation, we provide qualitative analyses on two representative real-world datasets, illustrating how F<sup>2</sup>-CommNet suppresses hallucination-prone structures and stabilizes community embeddings.</p>
<p><bold>Cora: t-SNE embedding visualization</bold>. <xref ref-type="fig" rid="F10">Figure 10</xref> compares the node embeddings produced by GCN, EvolveGCN, and F<sup>2</sup>-CommNet using t-SNE. GCN and EvolveGCN exhibit scattered and overlapping clusters, indicating unstable high-frequency modes that distort community boundaries. In contrast, F<sup>2</sup>-CommNet produces compact and well-separated clusters with significantly fewer noisy points, visually confirming the suppression of hallucination artifacts predicted by the spectral analysis.</p>
<fig position="float" id="F10">
<label>Figure 10</label>
<caption><p>t-SNE visualization of embeddings on the Cora dataset. F<sup>2</sup>-CommNet forms clearer clusters with fewer scattered points, indicating reduced hallucination and improved structural stability.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0010.tif">
<alt-text content-type="machine-generated">Three scatter plots comparing data distributions for GCN, EvolveGCN, and F2-CommNet. Each plot features clusters of colored dots: blue, green, and yellow, representing different data points. The arrangement of dots varies across the plots, reflecting different clustering patterns.</alt-text>
</graphic>
</fig>
<p><bold>Reddit: Training stability curves</bold>. <xref ref-type="fig" rid="F11">Figure 11</xref> reports the evolution of the stability margin &#x003C1;(<italic>t</italic>) and hallucination index &#x003B7;<sub>max</sub>(<italic>t</italic>) during training on Reddit. GCN and TGN exhibit strong oscillations and intermittent spikes in &#x003B7;<sub>max</sub>, revealing the presence of unstable spectral modes. EvolveGCN partially mitigates this behavior but still suffers from fluctuations. F<sup>2</sup>-CommNet maintains consistently higher &#x003C1;(<italic>t</italic>) and substantially lower &#x003B7;<sub>max</sub>(<italic>t</italic>) throughout training, demonstrating robust suppression of hallucination-prone eigenmodes on large-scale social networks.</p>
<fig position="float" id="F11">
<label>Figure 11</label>
<caption><p>Training dynamics on the Reddit dataset. <bold>(a)</bold> Stability margin &#x003C1; over training epochs. <bold>(b)</bold> Hallucination index &#x003B7;<sub>max</sub> over training epochs. F<sup>2</sup>-CommNet maintains a higher stability margin and a lower hallucination index throughout training, while baseline methods exhibit instability and oscillatory behaviors.</p></caption>
<graphic mimetype="image" mime-subtype="tiff" xlink:href="fncom-19-1731452-g0011.tif">
<alt-text content-type="machine-generated">Two line graphs comparing GCN, EvolveGCN, and F&#x000B2;-CommNet over epochs. The left graph shows the stability margin, where F&#x000B2;-CommNet consistently performs better. The right graph depicts the hallucination index, where F&#x000B2;-CommNet shows a decreasing trend, outperforming the others.</alt-text>
</graphic>
</fig>
<p><bold>Summary</bold>. Across citation and social networks, F<sup>2</sup>-CommNet consistently generates more stable, coherent, and hallucination-resistant embeddings. These qualitative results align with the theoretical predictions of the fractional damping mechanism.</p></sec></sec>
<sec sec-type="discussion" id="s5">
<label>5</label>
<title>Discussion</title>
<p>The proposed F<sup>2</sup>-CommNet framework advances community detection by integrating fractional-order dynamics with Fourier spectrum filtering, which systematically suppresses unstable modes prone to hallucination. Our theoretical analysis demonstrates that fractional damping enlarges the Lyapunov stability margin and effectively constrains error propagation paths in the presence of disturbances. This aligns with previous findings on instability in deep GNNs (<xref ref-type="bibr" rid="B26">Oono and Suzuki, 2020</xref>; <xref ref-type="bibr" rid="B2">Balcilar et al., 2021</xref>), while providing a constructive remedy grounded in fractional calculus.</p>
<p>Compared with traditional GNNs such as GCN and GAT, F<sup>2</sup>-CommNet shows enhanced robustness against over-smoothing and spectral noise. Prior works have attempted to stabilize message passing through residual connections (<xref ref-type="bibr" rid="B22">Li et al., 2019</xref>), polynomial filters (<xref ref-type="bibr" rid="B21">Levie et al., 2019</xref>), or regularization schemes such as DropEdge (<xref ref-type="bibr" rid="B33">Rong et al., 2020</xref>), yet they remain vulnerable to mode hallucinations. Our results indicate that the memory terms introduced by fractional dynamics act as intrinsic stabilizers, strengthening the spectral filtering and enabling interpretable clustering.</p>
<p>Recent stability-oriented GNNs such as SO-GCN (<xref ref-type="bibr" rid="B8">Chen et al., 2025c</xref>) and LDC-GAT (<xref ref-type="bibr" rid="B7">Chen et al., 2025b</xref>) offer valuable insights, but they are designed for semi-supervised node classification on static graphs, relying on label-driven objectives and task-specific stability constraints. In contrast, dynamic community detection requires unsupervised optimization of structural modularity across evolving graph snapshots. Furthermore, the supplementary Jacobian-based or norm-constrained calculations in these models impose significant overhead, while F<sup>2</sup>-CommNet maintains a near-linear complexity via its fractional difference operator, rendering it more appropriate for large-scale dynamic environments.</p>
<p>The empirical improvements observed in modularity, ARI, and calibration metrics confirm that fractional-Fourier coupling provides a generalizable mechanism. This is consistent with analogous results in fractional control theory (<xref ref-type="bibr" rid="B10">Diboune et al., 2024</xref>), where memory-induced damping yields resilience beyond integer-order models. In graph learning, Fourier-based filters have been studied in spectral GNNs (<xref ref-type="bibr" rid="B21">Levie et al., 2019</xref>), but the coupling with fractional operators introduces a novel design paradigm. Ablation studies further reveal that while each component&#x02014;fractional damping, Fourier filtering, and Lyapunov-based refinement&#x02014;improves performance individually, their combination is essential for hallucination suppression.</p>
<p>Beyond algorithmic contributions, the framework raises questions of interpretability and scalability. Fractional dynamics introduce hyperparameters (e.g., order &#x003B1;, leakage rate &#x003C1;) whose selection influences stability guarantees. Although our theoretical bounds guide parameter choice, adaptive tuning strategies remain an open challenge. Scalability also requires attention: Fourier filtering benefits from efficient polynomial approximations, whereas fractional integration is computationally heavier. Hybrid approximations, such as truncated Gr&#x000FC;nwald&#x02013;Letnikov operators, may offer a balance between accuracy and efficiency.</p>
<p>Looking ahead, three directions appear promising. First, extending F<sup>2</sup>-CommNet to temporal multiplex networks may enhance robustness in heterogeneous dynamic environments, resonating with advances in temporal community detection and multiplex modeling. Second, connections with Bayesian uncertainty modeling (<xref ref-type="bibr" rid="B39">Wang et al., 2024</xref>) suggest opportunities to combine probabilistic calibration with fractional stability, building on recent developments in Bayesian GNNs and uncertainty quantification (<xref ref-type="bibr" rid="B43">Zhang et al., 2020</xref>). Third, deploying F<sup>2</sup>-CommNet in applied domains such as smart grids, epidemiological contact networks, and multimodal social platforms will allow further evaluation of its interpretability and hallucination resistance (<xref ref-type="bibr" rid="B41">Ying et al., 2019</xref>).</p>
<p>In summary, this study unites spectral graph theory, fractional-order calculus, and neural dynamics to address instability in GNN-based community detection. By leveraging memory-driven fractional damping and Fourier spectral filtering, F<sup>2</sup>-CommNet establishes a foundation for interpretable, stable, and scalable graph learning models.</p></sec>
<sec sec-type="conclusions" id="s6">
<label>6</label>
<title>Conclusion</title>
<p>This study presented F<sup>2</sup>-CommNet, a fractional-Fourier hybrid framework for dynamic community detection. By combining fractional-order dynamics, Fourier spectral filtering and stability-aware refinement, the model offers both theoretical guarantees and practical scalability.</p>
<p><bold>Theoretical impact</bold>. Our fractional Lyapunov analysis demonstrates that the proposed framework enlarges the stability margin &#x003C1; by more than <bold>3 &#x000D7; </bold> (on average from 0.12 to 0.41 across datasets) and reduces the hallucination index &#x003B7;<sub>max</sub> by up to <bold>35%</bold> (from 0.31 to 0.20). These results provide explicit robustness criteria rarely found in prior community detection literature.</p>
<p><bold>Empirical performance</bold>. Across seven benchmarks (Cora, Citeseer, PubMed, Reddit, Enron, DBLP, BioGRID), F<sup>2</sup>-CommNet improves Adjusted Rand Index (ARI) by up to <bold>25%</bold> (e.g., Cora: 0.58 &#x02192; 0.73) and Normalized Mutual Information (NMI) by <bold>15%</bold> (PubMed: 0.49 &#x02192; 0.56). Compared with static baselines (GCN, GAT), the improvements are consistent, while relative to dynamic baselines (DyGCN, EvolveGCN), additional gains of <bold>3&#x02013;6%</bold> ARI are observed. Overall, as summarized in <xref ref-type="table" rid="T7">Table 7</xref>, F<sup>2</sup>-CommNet achieves the best result in <bold>32 out of 35</bold> metric-dataset pairs. Moreover, the variance across 10 independent runs remains below <bold>2%</bold>, confirming robustness and reproducibility.</p>
<p><bold>Practical scalability</bold>. The complexity remains near-linear, <italic>O</italic>(<italic>nHd</italic>&#x0002B;<italic>nr</italic>log<italic>n</italic>), with <italic>H</italic>&#x0226A;<italic>n</italic> and spectral rank <italic>r</italic>&#x0226A;<italic>n</italic>. On large graphs, the method scales to millions of nodes: on Reddit (232k nodes, 11.6M edges), F<sup>2</sup>-CommNet reduces training time per epoch by <bold>18%</bold> compared with EvolveGCN (42.5s &#x02192; 34.7s), while on DBLP (317k nodes, 1.6M edges) it lowers peak memory usage by <bold>21%</bold>. These quantitative results highlight that the method is not only more accurate, but also computationally efficient in industrial-scale settings.</p>
<p>In summary, F<sup>2</sup>-CommNet delivers measurable and reproducible gains: <bold>&#x0002B;25% ARI</bold>, <bold>&#x0002B;15% NMI</bold>, <bold>3 &#x000D7; </bold> <bold>stability margin</bold>, <bold>&#x02013;35% hallucinations</bold>, and <bold>32/35 wins</bold> across benchmarks, with variance &#x0003C; <bold>2%</bold> and training time reduced by up to <bold>18%</bold> on large-scale graphs. These results demonstrate that fractional-Fourier modeling provides a rigorous and scalable foundation for robust dynamic graph learning.</p></sec>
<sec id="s7">
<label>7</label>
<title>Future work</title>
<p>Despite F<sup>2</sup>-CommNet demonstrating considerable advancements in hallucination suppression and community interpretability, numerous avenues for further exploration persist. Future endeavors will concentrate on scaling the methodology to billion-scale graphs via distributed spectral filtering and efficient fractional solvers, expanding the framework to dynamic and temporal networks, and investigating adaptive strategies for the selection of the fractional order &#x003B1;.</p>
<p>In addition, recent stability-oriented architectures such as SO-GCN (<xref ref-type="bibr" rid="B8">Chen et al., 2025c</xref>) and LDC-GAT (<xref ref-type="bibr" rid="B7">Chen et al., 2025b</xref>) suggest promising constraint-based mechanisms. An intriguing avenue of exploration is to examine the potential generalization of their stability principles to unsupervised and dynamic clustering contexts, hence enhancing boundary preservation in changing graphs.</p>
<p>Furthermore, implementing the model in cross-domain issues such as cybersecurity, protein-protein interactions, and knowledge graph reasoning could enhance its influence. Ultimately, additional theoretical examination, especially concerning stochastic perturbations and generalization assurances, could reinforce the mathematical underpinnings of Fourier&#x02013;fractional graph learning.</p></sec>
</body>
<back>
<sec sec-type="data-availability" id="s8">
<title>Data availability statement</title>
<p>The original contributions presented in the study are included in the article/supplementary material; further inquiries can be directed to the corresponding authors.</p>
</sec>
<sec sec-type="author-contributions" id="s9">
<title>Author contributions</title>
<p>DQ: Conceptualization, Data curation, Formal analysis, Funding acquisition, Investigation, Methodology, Project administration, Resources, Software, Supervision, Validation, Visualization, Writing &#x02013; original draft, Writing &#x02013; review &#x00026; editing. YM: Conceptualization, Data curation, Formal analysis, Funding acquisition, Investigation, Methodology, Project administration, Resources, Software, Supervision, Validation, Visualization, Writing &#x02013; original draft, Writing &#x02013; review &#x00026; editing.</p>
</sec>
<sec sec-type="COI-statement" id="conf1">
<title>Conflict of interest</title>
<p>The author(s) declared that this work was conducted in the absence of any commercial or financial relationships that could be construed as a potential conflict of interest.</p>
</sec>
<sec sec-type="ai-statement" id="s11">
<title>Generative AI statement</title>
<p>The author(s) declared that generative AI was not used in the creation of this manuscript.</p>
<p>Any alternative text (alt text) provided alongside figures in this article has been generated by Frontiers with the support of artificial intelligence and reasonable efforts have been made to ensure accuracy, including review by the authors wherever possible. If you identify any issues, please contact us.</p></sec>
<sec sec-type="disclaimer" id="s12">
<title>Publisher&#x00027;s note</title>
<p>All claims expressed in this article are solely those of the authors and do not necessarily represent those of their affiliated organizations, or those of the publisher, the editors and the reviewers. Any product that may be evaluated in this article, or claim that may be made by its manufacturer, is not guaranteed or endorsed by the publisher.</p>
</sec>
<ref-list>
<title>References</title>
<ref id="B1">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Abbahaddou</surname> <given-names>Y.</given-names></name> <name><surname>Ennadir</surname> <given-names>S.</given-names></name> <name><surname>Lutzeyer</surname> <given-names>J. F.</given-names></name> <name><surname>Vazirgiannis</surname> <given-names>M.</given-names></name></person-group> (<year>2024</year>). &#x0201C;Bounding the expected robustness of graph neural networks subject to node feature attacks,&#x0201D; in <italic>Proceedings of the International Conference on Learning Representations (ICLR)</italic> (Amherst, MA: OpenReview.net).</mixed-citation>
</ref>
<ref id="B2">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Balcilar</surname> <given-names>M.</given-names></name> <name><surname>Renton</surname> <given-names>G.</given-names></name> <name><surname>H&#x000E9;roux</surname> <given-names>P.</given-names></name> <name><surname>Ga&#x000FC;z&#x000E8;re</surname> <given-names>B.</given-names></name> <name><surname>Adam</surname> <given-names>S.</given-names></name> <name><surname>Honeine</surname> <given-names>P.</given-names></name></person-group> (<year>2021</year>). &#x0201C;Analyzing the expressive power of graph neural networks in a spectral perspective,&#x0201D; in <italic>International Conference on Learning Representations</italic> (Amherst, MA: OpenReview.net).</mixed-citation>
</ref>
<ref id="B3">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Cai</surname> <given-names>X.</given-names></name> <name><surname>Wang</surname> <given-names>B.</given-names></name></person-group> (<year>2023</year>). <article-title>A graph convolutional fusion model for community detection in multiplex networks</article-title>. <source>Data Min. Knowl. Discov</source>. <volume>37</volume>, <fpage>1518</fpage>&#x02013;<lpage>1547</lpage>. doi: <pub-id pub-id-type="doi">10.1007/s10618-023-00932-w</pub-id></mixed-citation>
</ref>
<ref id="B4">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Casteigts</surname> <given-names>A.</given-names></name> <name><surname>Flocchini</surname> <given-names>P.</given-names></name> <name><surname>Quattrociocchi</surname> <given-names>W.</given-names></name> <name><surname>Santoro</surname> <given-names>N.</given-names></name></person-group> (<year>2023</year>). <article-title>Time-varying graphs and dynamic networks</article-title>. <source>Theoret. Comput. Sci</source>. <volume>929</volume>, <fpage>45</fpage>&#x02013;<lpage>69</lpage>. doi: <pub-id pub-id-type="doi">10.1007/978-3-642-22450-8_27</pub-id></mixed-citation>
</ref>
<ref id="B5">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>J.</given-names></name> <name><surname>Wang</surname> <given-names>S.</given-names></name> <name><surname>He</surname> <given-names>L.</given-names></name></person-group> (<year>2023</year>). <article-title>Stability of graph neural networks for community detection</article-title>. <source>Neurocomputing</source> <volume>514</volume>, <fpage>48</fpage>&#x02013;<lpage>61</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.neucom.2023.01.072</pub-id></mixed-citation>
</ref>
<ref id="B6">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>L.</given-names></name> <name><surname>Zhou</surname> <given-names>Q.</given-names></name> <name><surname>Zhao</surname> <given-names>D.</given-names></name></person-group> (<year>2025a</year>). <article-title>k-plex-based community detection with graph neural networks</article-title>. <source>Inform. Sci</source>. <volume>689</volume>:<fpage>121509</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.ins.2024.121509</pub-id></mixed-citation>
</ref>
<ref id="B7">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>L.</given-names></name> <name><surname>Zhu</surname> <given-names>H.</given-names></name> <name><surname>Han</surname> <given-names>S.</given-names></name></person-group> (<year>2025b</year>). <article-title>Ldc-gat: a lyapunov-stable graph attention network with dynamic filtering and constraint-aware optimization</article-title>. <source>Axioms</source> <volume>14</volume>:<fpage>504</fpage>. doi: <pub-id pub-id-type="doi">10.3390/axioms14070504</pub-id></mixed-citation>
</ref>
<ref id="B8">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Chen</surname> <given-names>L.</given-names></name> <name><surname>Zhu</surname> <given-names>H.</given-names></name> <name><surname>Han</surname> <given-names>S.</given-names></name></person-group> (<year>2025c</year>). <article-title>Stability-optimized graph convolutional network: a novel propagation rule with constraints derived from odes</article-title>. <source>Mathematics</source> <volume>13</volume>:<fpage>761</fpage>. doi: <pub-id pub-id-type="doi">10.3390/math13050761</pub-id></mixed-citation>
</ref>
<ref id="B9">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Cheng</surname> <given-names>X.</given-names></name> <name><surname>Zhu</surname> <given-names>W.</given-names></name> <name><surname>Yan</surname> <given-names>W. Q.</given-names></name></person-group> (<year>2025</year>). <article-title>Centrality-aware collaborative network embedding for overlapping community detection</article-title>. <source>IEEE Trans. Netw. Sci. Eng</source>. <volume>13</volume>, <fpage>2236</fpage>&#x02013;<lpage>2250</lpage>. doi: <pub-id pub-id-type="doi">10.1109/TNSE.2025.3611500</pub-id></mixed-citation>
</ref>
<ref id="B10">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Diboune</surname> <given-names>A.</given-names></name> <name><surname>Slimani</surname> <given-names>H.</given-names></name> <name><surname>Nacer</surname> <given-names>H.</given-names></name> <name><surname>Bey</surname> <given-names>K. B.</given-names></name></person-group> (<year>2024</year>). <article-title>A comprehensive survey on community detection methods and applications in complex information networks</article-title>. <source>Social Netw. Anal. Min</source>. <volume>14</volume>:<fpage>93</fpage>. doi: <pub-id pub-id-type="doi">10.1007/s13278-024-01246-5</pub-id></mixed-citation>
</ref>
<ref id="B11">
<mixed-citation publication-type="book"><person-group person-group-type="author"><name><surname>Golub</surname> <given-names>G. H.</given-names></name> <name><surname>Van Loan</surname> <given-names>C. F.</given-names></name></person-group> (<year>2013</year>). <source>Matrix Computations, 4th Edn</source>. <publisher-loc>Baltimore, MD</publisher-loc>: <publisher-name>Johns Hopkins University Press</publisher-name>.</mixed-citation>
</ref>
<ref id="B12">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Guo</surname> <given-names>B.</given-names></name> <name><surname>Deng</surname> <given-names>L.</given-names></name> <name><surname>Lian</surname> <given-names>T.</given-names></name></person-group> (<year>2025</year>). <article-title>Gcn-based unsupervised community detection with refined structure centers and expanded pseudo-labeled set</article-title>. <source>PLoS ONE</source> <volume>20</volume>:<fpage>e0327022</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0327022</pub-id><pub-id pub-id-type="pmid">40591653</pub-id></mixed-citation>
</ref>
<ref id="B13">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Holme</surname> <given-names>P.</given-names></name></person-group> (<year>2023</year>). <source>Temporal Network Theory. SpringerBriefs in Complexity</source>. Cham: Springer. doi: <pub-id pub-id-type="doi">10.1007/978-3-031-30399-9</pub-id></mixed-citation>
</ref>
<ref id="B14">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Hu</surname> <given-names>W.</given-names></name> <name><surname>Fey</surname> <given-names>M.</given-names></name> <name><surname>Zitnik</surname> <given-names>M.</given-names></name> <name><surname>Dong</surname> <given-names>Y.</given-names></name> <name><surname>Ren</surname> <given-names>H.</given-names></name> <name><surname>Liu</surname> <given-names>B.</given-names></name> <etal/></person-group>. (<year>2020</year>). &#x0201C;Open graph benchmark: datasets for machine learning on graphs,&#x0201D; in <italic>Advances in Neural Information Processing Systems (NeurIPS)</italic> (Red Hook, NY: Curran Associates, Inc.), Vol. <volume>33</volume>, <fpage>22118</fpage>&#x02013;<lpage>22133</lpage>.</mixed-citation>
</ref>
<ref id="B15">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kaiser</surname> <given-names>J.</given-names></name> <name><surname>F&#x000E4;hnrich</surname> <given-names>B.</given-names></name> <name><surname>Heintz</surname> <given-names>L.</given-names></name></person-group> (<year>2023</year>). <article-title>Ups and downs on &#x02018;r/science&#x02019; &#x02014; exploring the dynamics of science communication on reddit</article-title>. <source>J. Sci. Commun</source>. <volume>22</volume>:<fpage>A08</fpage>. doi: <pub-id pub-id-type="doi">10.22323/2.22020208</pub-id></mixed-citation>
</ref>
<ref id="B16">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kang</surname> <given-names>Q.</given-names></name> <name><surname>Zhao</surname> <given-names>K.</given-names></name> <name><surname>Ding</surname> <given-names>Q.</given-names></name> <name><surname>Ji</surname> <given-names>F.</given-names></name> <name><surname>Li</surname> <given-names>X.</given-names></name> <name><surname>Liang</surname> <given-names>W.</given-names></name> <etal/></person-group>. (<year>2024</year>). &#x0201C;Unleashing the potential of fractional calculus in graph neural networks with frond,&#x0201D; in <italic>Proceedings of the International Conference on Learning Representations (ICLR)</italic> (Amherst, MA: OpenReview.net).</mixed-citation>
</ref>
<ref id="B17">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kipf</surname> <given-names>T. N.</given-names></name> <name><surname>Welling</surname> <given-names>M.</given-names></name></person-group> (<year>2017</year>). &#x0201C;Semi-supervised classification with graph convolutional networks,&#x0201D; in <italic>International Conference on Learning Representations</italic> (Amherst, MA: OpenReview.net).</mixed-citation>
</ref>
<ref id="B18">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kojaku</surname> <given-names>S.</given-names></name> <name><surname>Radicchi</surname> <given-names>F.</given-names></name> <name><surname>Ahn</surname> <given-names>Y.-Y.</given-names></name></person-group> (<year>2024</year>). <article-title>Network community detection via neural embeddings</article-title>. <source>Nat. Communic</source>. <volume>15</volume>:<fpage>9446</fpage>. doi: <pub-id pub-id-type="doi">10.1038/s41467-024-52355-w</pub-id><pub-id pub-id-type="pmid">39487114</pub-id></mixed-citation>
</ref>
<ref id="B19">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Kumar</surname> <given-names>M.</given-names></name> <name><surname>Mehta</surname> <given-names>U.</given-names></name> <name><surname>Cirrincione</surname> <given-names>G.</given-names></name></person-group> (<year>2024</year>). <article-title>Enhancing neural network classification using fractional-order activation functions</article-title>. <source>AI Open</source> <volume>5</volume>, <fpage>10</fpage>&#x02013;<lpage>22</lpage>. doi: <pub-id pub-id-type="doi">10.1016/j.aiopen.2023.12.003</pub-id></mixed-citation>
</ref>
<ref id="B20">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Lambiotte</surname> <given-names>R.</given-names></name> <name><surname>Rosvall</surname> <given-names>M.</given-names></name></person-group> (<year>2022</year>). <article-title>Temporal community detection in evolving networks</article-title>. <source>Nat. Commun</source>. <volume>13</volume>:<fpage>345</fpage>.</mixed-citation>
</ref>
<ref id="B21">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Levie</surname> <given-names>R.</given-names></name> <name><surname>Isufi</surname> <given-names>E.</given-names></name> <name><surname>Kutyniok</surname> <given-names>G.</given-names></name></person-group> (<year>2019</year>). &#x0201C;On the transferability of spectral graph filters,&#x0201D; in <italic>2019 13th International conference on Sampling Theory and Applications (SampTA)</italic> (Piscataway, NJ: Institute of Electrical and Electronics Engineers (IEEE)), <fpage>1</fpage>&#x02013;<lpage>5</lpage>. doi: <pub-id pub-id-type="doi">10.1109/SampTA45681.2019.9030932</pub-id></mixed-citation>
</ref>
<ref id="B22">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Li</surname> <given-names>G.</given-names></name> <name><surname>M&#x000FC;ller</surname> <given-names>M.</given-names></name> <name><surname>Thabet</surname> <given-names>A.</given-names></name> <name><surname>Ghanem</surname> <given-names>B.</given-names></name></person-group> (<year>2019</year>). &#x0201C;Deepgcns: can gcns go as deep as cnns?,&#x0201D; in <italic>Proceedings of the IEEE/CVF International Conference on Computer Vision (ICCV)</italic> (Piscataway, NJ: Institute of Electrical and Electronics Engineers (IEEE)), <fpage>9267</fpage>&#x02013;<lpage>9276</lpage>. doi: <pub-id pub-id-type="doi">10.1109/ICCV.2019.00936</pub-id></mixed-citation>
</ref>
<ref id="B23">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Liu</surname> <given-names>C.</given-names></name> <name><surname>Han</surname> <given-names>Y.</given-names></name> <name><surname>Xu</surname> <given-names>H.</given-names></name> <name><surname>Yang</surname> <given-names>S.</given-names></name> <name><surname>Wang</surname> <given-names>K.</given-names></name> <name><surname>Su</surname> <given-names>Y.</given-names></name></person-group> (<year>2024</year>). <article-title>A community detection and graph neural network based link prediction approach for scientific literature</article-title>. <source>Mathematics</source> <volume>12</volume>:<fpage>369</fpage>. doi: <pub-id pub-id-type="doi">10.3390/math12030369</pub-id></mixed-citation>
</ref>
<ref id="B24">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Manessi</surname> <given-names>F.</given-names></name> <name><surname>Rozza</surname> <given-names>A.</given-names></name> <name><surname>Manzo</surname> <given-names>M.</given-names></name></person-group> (<year>2020</year>). <article-title>Dynamic graph convolutional networks</article-title>. <source>Pattern Recogn</source>. <volume>97</volume>:<fpage>107000</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.patcog.2019.107000</pub-id></mixed-citation>
</ref>
<ref id="B25">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Maskey</surname> <given-names>S.</given-names></name> <name><surname>Paolino</surname> <given-names>R.</given-names></name> <name><surname>Bacho</surname> <given-names>A.</given-names></name> <name><surname>Kutyniok</surname> <given-names>G.</given-names></name></person-group> (<year>2023</year>). &#x0201C;A fractional graph laplacian approach to oversmoothing,&#x0201D; in <italic>Proceedings of the Neural Information Processing Systems (NeurIPS)</italic> (Red Hook, NY: Curran Associates, Inc.).</mixed-citation>
</ref>
<ref id="B26">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Oono</surname> <given-names>K.</given-names></name> <name><surname>Suzuki</surname> <given-names>T.</given-names></name></person-group> (<year>2020</year>). &#x0201C;Graph neural networks exponentially lose expressive power for node classification,&#x0201D; in <italic>Proceedings of the International Conference on Learning Representations (ICLR)</italic> (Amherst, MA: OpenReview.net).</mixed-citation>
</ref>
<ref id="B27">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Oughtred</surname> <given-names>R.</given-names></name> <name><surname>Rust</surname> <given-names>J.</given-names></name> <name><surname>Chang</surname> <given-names>C.</given-names></name> <name><surname>Breitkreutz</surname> <given-names>B.-J.</given-names></name> <name><surname>Stark</surname> <given-names>C.</given-names></name> <name><surname>Willems</surname> <given-names>A.</given-names></name> <etal/></person-group>. (<year>2021</year>). <article-title>Biogrid: a comprehensive biomedical resource of curated protein, genetic, and chemical interactions</article-title>. <source>Protein Sci</source>. <volume>30</volume>, <fpage>187</fpage>&#x02013;<lpage>200</lpage>. doi: <pub-id pub-id-type="doi">10.1002/pro.3978</pub-id></mixed-citation>
</ref>
<ref id="B28">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Panda</surname> <given-names>S. K.</given-names></name> <name><surname>Vijayakumar</surname> <given-names>V.</given-names></name> <name><surname>Agarwal</surname> <given-names>R. P.</given-names></name> <name><surname>Rasham</surname> <given-names>T.</given-names></name></person-group> (<year>2025</year>). <article-title>Fractional-order complex-valued neural networks: stability results, numerical simulations and application to game-theoretical decision making</article-title>. <source>Discrete Contin. Dyn. Syst.-S</source> <volume>18</volume>, <fpage>2622</fpage>&#x02013;<lpage>2643</lpage>. doi: <pub-id pub-id-type="doi">10.3934/dcdss.2025071</pub-id></mixed-citation>
</ref>
<ref id="B29">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pareja</surname> <given-names>A.</given-names></name> <name><surname>Domeniconi</surname> <given-names>G.</given-names></name> <name><surname>Chen</surname> <given-names>J.</given-names></name> <name><surname>Ma</surname> <given-names>T.</given-names></name> <name><surname>Suzumura</surname> <given-names>T.</given-names></name> <name><surname>Kanezashi</surname> <given-names>H.</given-names></name> <etal/></person-group>. (<year>2020</year>). &#x0201C;Evolvegcn: evolving graph convolutional networks for dynamic graphs,&#x0201D; in <source>Proceedings of the AAAI Conference on Artificial Intelligence</source>, vol. 34 (Palo Alto, CA: AAAI Press), <fpage>5363</fpage>&#x02013;<lpage>5370</lpage>. doi: <pub-id pub-id-type="doi">10.1609/aaai.v34i04.5984</pub-id></mixed-citation>
</ref>
<ref id="B30">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Pascanu</surname> <given-names>R.</given-names></name> <name><surname>Mikolov</surname> <given-names>T.</given-names></name> <name><surname>Bengio</surname> <given-names>Y.</given-names></name></person-group> (<year>2013</year>). &#x0201C;On the difficulty of training recurrent neural networks,&#x0201D; in <italic>Proceedings of the 30th International Conference on Machine Learning (ICML), volume 28 of Proceedings of Machine Learning Research</italic> (Brookline, MA: Proceedings of Machine Learning Research (PMLR)), <fpage>1310</fpage>&#x02013;<lpage>1318</lpage>.</mixed-citation>
</ref>
<ref id="B31">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Peixoto</surname> <given-names>T. P.</given-names></name></person-group> (<year>2019</year>). &#x0201C;Bayesian stochastic blockmodeling,&#x0201D; in <source>Advances in Network Clustering and Blockmodeling</source>, eds. P. Doreian, V. Batagelj, and A. Ferligoj (Cham: Springer), <fpage>289</fpage>&#x02013;<lpage>332</lpage>. doi: <pub-id pub-id-type="doi">10.1002/9781119483298.ch11</pub-id></mixed-citation>
</ref>
<ref id="B32">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Prokop</surname> <given-names>P.</given-names></name> <name><surname>Dr&#x000E1;&#x0017E;dilov&#x000E1;</surname> <given-names>P.</given-names></name> <name><surname>Plato&#x00161;</surname> <given-names>J.</given-names></name></person-group> (<year>2024</year>). <article-title>Overlapping community detection in weighted networks via hierarchical clustering</article-title>. <source>PLoS ONE</source> <volume>19</volume>:<fpage>e0312596</fpage>. doi: <pub-id pub-id-type="doi">10.1371/journal.pone.0312596</pub-id><pub-id pub-id-type="pmid">39466771</pub-id></mixed-citation>
</ref>
<ref id="B33">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Rong</surname> <given-names>Y.</given-names></name> <name><surname>Huang</surname> <given-names>W.</given-names></name> <name><surname>Xu</surname> <given-names>T.</given-names></name> <name><surname>Huang</surname> <given-names>J.</given-names></name></person-group> (<year>2020</year>). &#x0201C;Dropedge: towards deep graph convolutional networks on node classification,&#x0201D; in <italic>Proceedings of the International Conference on Learning Representations (ICLR)</italic> (Amherst, MA: OpenReview.net) (Accessed September 21, 2025).</mixed-citation>
</ref>
<ref id="B34">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Rossi</surname> <given-names>E.</given-names></name> <name><surname>Chamberlain</surname> <given-names>B.</given-names></name> <name><surname>Frasca</surname> <given-names>F.</given-names></name> <name><surname>Eynard</surname> <given-names>D.</given-names></name> <name><surname>Monti</surname> <given-names>F.</given-names></name> <name><surname>Bronstein</surname> <given-names>M.</given-names></name></person-group> (<year>2020</year>). &#x0201C;Temporal graph networks for deep learning on dynamic graphs,&#x0201D; in <italic>International Conference on Learning Representations (ICLR) Workshop</italic> (Amherst, MA: OpenReview.net).</mixed-citation>
</ref>
<ref id="B35">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Scaman</surname> <given-names>K.</given-names></name> <name><surname>Virmaux</surname> <given-names>A.</given-names></name></person-group> (<year>2018</year>). <article-title>Lipschitz regularity of deep neural networks: analysis and efficient estimation</article-title>. <source>Adv. Neural Inform. Process. Syst</source>. <volume>31</volume>.</mixed-citation>
</ref>
<ref id="B36">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Shah</surname> <given-names>N.</given-names></name></person-group> (<year>2022</year>). <article-title>An overview of spectral clustering</article-title>. <source>Appl. Comput. Harmon. Anal</source>. <volume>59</volume>, <fpage>100</fpage>&#x02013;<lpage>135</lpage>.</mixed-citation>
</ref>
<ref id="B37">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Sivalingam</surname> <given-names>S. M.</given-names></name> <name><surname>Govindaraj</surname> <given-names>V.</given-names></name></person-group> (<year>2025</year>). <article-title>Neural fractional order differential equations</article-title>. <source>Expert Syst. Applic</source>. <volume>267</volume>:<fpage>126041</fpage>. doi: <pub-id pub-id-type="doi">10.1016/j.eswa.2024.126041</pub-id></mixed-citation>
</ref>
<ref id="B38">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Velickovic</surname> <given-names>P.</given-names></name> <name><surname>Cucurull</surname> <given-names>G.</given-names></name> <name><surname>Casanova</surname> <given-names>A.</given-names></name> <name><surname>Romero</surname> <given-names>A.</given-names></name> <name><surname>Li&#x000F2;</surname> <given-names>P.</given-names></name> <name><surname>Bengio</surname> <given-names>Y.</given-names></name></person-group> (<year>2018</year>). &#x0201C;Graph attention networks,&#x0201D; in <italic>International Conference on Learning Representations</italic> (Amherst, MA: OpenReview.net).</mixed-citation>
</ref>
<ref id="B39">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Wang</surname> <given-names>F.</given-names></name> <name><surname>Liu</surname> <given-names>Y.</given-names></name> <name><surname>Liu</surname> <given-names>K.</given-names></name> <name><surname>Wang</surname> <given-names>Y.</given-names></name> <name><surname>Medya</surname> <given-names>S.</given-names></name> <name><surname>Yu</surname> <given-names>P. S.</given-names></name></person-group> (<year>2024</year>). <article-title>Uncertainty in graph neural networks: a survey</article-title>. <source>Trans. Mach. Learn. Res</source>. doi: <pub-id pub-id-type="doi">10.48550/arXiv.2403.07185</pub-id></mixed-citation>
</ref>
<ref id="B40">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Wu</surname> <given-names>Z.</given-names></name> <name><surname>Pan</surname> <given-names>S.</given-names></name> <name><surname>Chen</surname> <given-names>F.</given-names></name> <name><surname>Long</surname> <given-names>G.</given-names></name> <name><surname>Zhang</surname> <given-names>C.</given-names></name> <name><surname>Yu</surname> <given-names>P. S.</given-names></name></person-group> (<year>2021</year>). <article-title>A comprehensive survey on graph neural networks</article-title>. <source>IEEE Trans. Neural Netw. Learn. Syst</source>. <volume>32</volume>, <fpage>4</fpage>&#x02013;<lpage>24</lpage>. doi: <pub-id pub-id-type="doi">10.1109/TNNLS.2020.2978386</pub-id><pub-id pub-id-type="pmid">32217482</pub-id></mixed-citation>
</ref>
<ref id="B41">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Ying</surname> <given-names>Z.</given-names></name> <name><surname>Bourgeois</surname> <given-names>D.</given-names></name> <name><surname>You</surname> <given-names>J.</given-names></name> <name><surname>Zitnik</surname> <given-names>M.</given-names></name> <name><surname>Leskovec</surname> <given-names>J.</given-names></name></person-group> (<year>2019</year>). <article-title>Gnnexplainer: generating explanations for graph neural networks</article-title>. <source>Adv. Neural Inform. Process. Syst</source>. <volume>32</volume>. doi: <pub-id pub-id-type="doi">10.48550/arXiv.1903.03894</pub-id><pub-id pub-id-type="pmid">32265580</pub-id></mixed-citation>
</ref>
<ref id="B42">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Yu</surname> <given-names>B.</given-names></name> <name><surname>Xu</surname> <given-names>X.</given-names></name> <name><surname>Wen</surname> <given-names>C.</given-names></name> <name><surname>Xie</surname> <given-names>Y.</given-names></name> <name><surname>Zhang</surname> <given-names>C.</given-names></name></person-group> (<year>2022</year>). &#x0201C;Hierarchical graph representation learning with structural attention for graph classification,&#x0201D; in <italic>CAAI International Conference on Artificial Intelligence, Lecture Notes in Computer Science</italic> (Cham: Springer), <fpage>473</fpage>&#x02013;<lpage>484</lpage>. doi: <pub-id pub-id-type="doi">10.1007/978-3-031-20500-2_39</pub-id></mixed-citation>
</ref>
<ref id="B43">
<mixed-citation publication-type="journal"><person-group person-group-type="author"><name><surname>Zhang</surname> <given-names>C.</given-names></name> <name><surname>Liu</surname> <given-names>F.</given-names></name> <name><surname>Zhou</surname> <given-names>L.</given-names></name> <name><surname>He</surname> <given-names>J.</given-names></name> <name><surname>Zhang</surname> <given-names>H.</given-names></name></person-group> (<year>2020</year>). <article-title>Bayesian graph neural networks for reliable prediction</article-title>. <source>IEEE Trans. Neural Netw. Learn. Syst</source>. <volume>31</volume>, <fpage>3214</fpage>&#x02013;<lpage>3229</lpage>.</mixed-citation>
</ref>
</ref-list>
<fn-group>
<fn fn-type="custom" custom-type="edited-by" id="fn0001">
<p>Edited by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/49205/overview">Si Wu</ext-link>, Peking University, China</p>
</fn>
<fn fn-type="custom" custom-type="reviewed-by" id="fn0002">
<p>Reviewed by: <ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3141049/overview">Wenjie Zhu</ext-link>, China Jiliang University, China</p>
<p><ext-link ext-link-type="uri" xlink:href="https://loop.frontiersin.org/people/3261930/overview">Han Shuguang</ext-link>, Zhejiang Sci-Tech University, China</p>
</fn>
</fn-group>
</back>
</article>