<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "journalpublishing.dtd"><article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" dtd-version="2.0" xml:lang="en" article-type="review-article"><front><journal-meta><journal-id journal-id-type="nlm-ta">JMIR Mhealth Uhealth</journal-id><journal-id journal-id-type="publisher-id">mhealth</journal-id><journal-id journal-id-type="index">13</journal-id><journal-title>JMIR mHealth and uHealth</journal-title><abbrev-journal-title>JMIR Mhealth Uhealth</abbrev-journal-title><issn pub-type="epub">2291-5222</issn><publisher><publisher-name>JMIR Publications</publisher-name><publisher-loc>Toronto, Canada</publisher-loc></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">v14i1e64144</article-id><article-id pub-id-type="doi">10.2196/64144</article-id><article-categories><subj-group subj-group-type="heading"><subject>Review</subject></subj-group></article-categories><title-group><article-title>Examining the Use of Consumer Wearable Devices and Digital Tools for Stress Measurement in College Students: Scoping Review of Methods</article-title></title-group><contrib-group><contrib contrib-type="author" corresp="yes" equal-contrib="yes"><name name-style="western"><surname>Sathyanarayana</surname><given-names>Aarti</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff2">2</xref><xref ref-type="fn" rid="equal-contrib1">*</xref></contrib><contrib contrib-type="author" equal-contrib="yes"><name name-style="western"><surname>Amin</surname><given-names>Ohida Binte</given-names></name><degrees>BSc</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff2">2</xref><xref ref-type="fn" rid="equal-contrib1">*</xref></contrib><contrib contrib-type="author"><name 
name-style="western"><surname>An</surname><given-names>Jennie</given-names></name><degrees>MS</degrees><xref ref-type="aff" rid="aff1">1</xref><xref ref-type="aff" rid="aff2">2</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Onnela</surname><given-names>Jukka Pekka</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff3">3</xref></contrib></contrib-group><aff id="aff1"><institution>Bouve College of Health Sciences, Northeastern University</institution><addr-line>360 Huntington Avenue</addr-line><addr-line>Boston</addr-line><addr-line>MA</addr-line><country>United States</country></aff><aff id="aff2"><institution>Khoury College of Computer Science, Northeastern University</institution><addr-line>Boston</addr-line><addr-line>MA</addr-line><country>United States</country></aff><aff id="aff3"><institution>Department of Biostatistics, Harvard T.H. Chan School of Public Health, Harvard University</institution><addr-line>Boston</addr-line><addr-line>MA</addr-line><country>United States</country></aff><contrib-group><contrib contrib-type="editor"><name name-style="western"><surname>Brini</surname><given-names>Stefano</given-names></name></contrib></contrib-group><contrib-group><contrib contrib-type="reviewer"><name name-style="western"><surname>Matabuena</surname><given-names>Marcos</given-names></name></contrib><contrib contrib-type="reviewer"><name name-style="western"><surname>Nath</surname><given-names>Rajdeep K</given-names></name></contrib></contrib-group><author-notes><corresp>Correspondence to Aarti Sathyanarayana, PhD, Bouve College of Health Sciences, Northeastern University, 360 Huntington Avenue, Boston, MA, 02115, United States; <email>a.sathyanarayana@northeastern.edu</email></corresp><fn fn-type="equal" id="equal-contrib1"><label>*</label><p>these authors contributed equally</p></fn></author-notes><pub-date pub-type="collection"><year>2026</year></pub-date><pub-date 
pub-type="epub"><day>30</day><month>3</month><year>2026</year></pub-date><volume>14</volume><elocation-id>e64144</elocation-id><history><date date-type="received"><day>09</day><month>07</month><year>2024</year></date><date date-type="rev-recd"><day>05</day><month>01</month><year>2026</year></date><date date-type="accepted"><day>06</day><month>01</month><year>2026</year></date></history><copyright-statement>&#x00A9; Aarti Sathyanarayana, Ohida Binte Amin, Jennie An, Jukka Pekka Onnela. Originally published in JMIR mHealth and uHealth (<ext-link ext-link-type="uri" xlink:href="https://mhealth.jmir.org">https://mhealth.jmir.org</ext-link>), 30.3.2026. </copyright-statement><copyright-year>2026</copyright-year><license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (<ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">https://creativecommons.org/licenses/by/4.0/</ext-link>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR mHealth and uHealth, is properly cited. The complete bibliographic information, a link to the original publication on <ext-link ext-link-type="uri" xlink:href="https://mhealth.jmir.org/">https://mhealth.jmir.org/</ext-link>, as well as this copyright and license information must be included.</p></license><self-uri xlink:type="simple" xlink:href="https://mhealth.jmir.org/2026/1/e64144"/><abstract><sec><title>Background</title><p>College-aged students face persistent academic and social stress that adversely affects their mental and physical health. Digital phenotyping with wearable devices enables real-time stress monitoring from continuous physiological signals, supporting just-in-time therapeutic interventions to improve student well-being. 
Despite rapid advances in wearables and analytical methods, it remains unclear which devices, physiological signals, and machine learning or deep learning approaches are most commonly used for stress detection in this population.</p></sec><sec><title>Objective</title><p>This study aimed to systematically review the literature to identify best practices and emerging trends in stress measurement using wearable technology and digital tools among college-aged students. We sought to evaluate commonalities in sensor types, datasets, and machine learning approaches used for stress detection.</p></sec><sec sec-type="methods"><title>Methods</title><p>A systematic search was conducted across medical and computer science databases, including Embase, PubMed, IEEE Xplore, and ACM Digital Library, for studies published between January 2020 and December 2025. Studies were included if they examined psychological stress detection using wearable or digital tools among college-aged students and were excluded if they focused on nonpsychological stress, were reviews or prototypes without a defined study population, or lacked clear population information. Two reviewers independently screened studies and extracted data on the wearable sensors, physiological signals, datasets, and modeling approaches to summarize trends in stress prediction.</p></sec><sec sec-type="results"><title>Results</title><p>A total of 134 studies met the inclusion criteria and were included in the review from the original 792 papers. Electrodermal activity was the most frequently used physiological signal, appearing in 57.5% (n=77) of studies, and wrist-worn wearable devices were the predominant sensing modality. Among studies that compared algorithms, support vector machines were identified as the most commonly applied and best-performing model in 33.3% (n=45) of cases. 
Overall, 62.8% (n=84) of included studies relied on preexisting datasets, and approximately 80% (n=67) of those used the Wearable Stress and Affect Detection dataset, which contains only 15 participants. Demographic reporting was inconsistent, as 27.6% (n=37) of studies did not report sex distribution, and only 4 studies justified the sample size. The use of temporal modeling algorithms was limited, despite their importance for capturing the dynamic, time-varying nature of stress. This review highlights persistent gaps and underscores the need for more diverse datasets and advanced modeling approaches to improve stress detection accuracy.</p></sec><sec sec-type="conclusions"><title>Conclusions</title><p>Our review innovatively synthesizes wearable-based stress detection research focused on college-aged students. Unlike prior reviews that aggregate heterogeneous populations or focus primarily on algorithmic performance, this review focused on wearable sensors, physiological signals, modeling approaches, and methodological quality to identify persistent gaps limiting real-world deployment. These findings inform the development of more generalizable monitoring systems to support early mental health intervention in students.</p></sec></abstract><kwd-group><kwd>digital phenotyping</kwd><kwd>wearable technology</kwd><kwd>stress detection</kwd><kwd>machine learning</kwd><kwd>college students</kwd></kwd-group></article-meta></front><body><sec id="s1" sec-type="intro"><title>Introduction</title><p>With the widespread adoption of wearable devices, numerous stress monitoring frameworks have been designed specifically for undergraduate students [<xref ref-type="bibr" rid="ref1">1</xref>-<xref ref-type="bibr" rid="ref3">3</xref>], given their heightened susceptibility to psychological stress. 
This need is underscored by findings that over 80% of undergraduate students report experiencing significant stress related to their academic life [<xref ref-type="bibr" rid="ref4">4</xref>]. University life can be particularly overwhelming, as many students experience independent living for the first time while navigating self-care and decision-making [<xref ref-type="bibr" rid="ref5">5</xref>]. While positive stress can sometimes enhance academic performance, persistent and long-lasting chronic stress can negatively impact both mental and physical health [<xref ref-type="bibr" rid="ref6">6</xref>]. By proactively managing stress, individuals can mitigate the risk of stress-related health issues, including cardiovascular problems, gastrointestinal issues, mental health disorders, substance abuse, and chronic diseases such as diabetes or hypertension [<xref ref-type="bibr" rid="ref7">7</xref>]. Stress also significantly disrupts sleep [<xref ref-type="bibr" rid="ref8">8</xref>], social interactions [<xref ref-type="bibr" rid="ref9">9</xref>], and academic performance [<xref ref-type="bibr" rid="ref10">10</xref>], contributing to insomnia [<xref ref-type="bibr" rid="ref11">11</xref>], anxiety [<xref ref-type="bibr" rid="ref12">12</xref>], and a weakened immune system [<xref ref-type="bibr" rid="ref13">13</xref>]. Digital phenotyping of stress, leveraging wearable and mobile technologies, enables just-in-time stress management solutions that help prevent chronic stress from compromising long-term health.</p><p>In recent years, the use of consumer wearables to monitor physical activity [<xref ref-type="bibr" rid="ref14">14</xref>] and other lifestyle traits [<xref ref-type="bibr" rid="ref15">15</xref>] has become more prevalent. For example, many commercial consumer wearables are being used to keep track of and improve upon fitness regimens [<xref ref-type="bibr" rid="ref16">16</xref>]. 
With this increased availability of wearables comes the possibility for real-time health management using these commercial devices that are more convenient and lightweight [<xref ref-type="bibr" rid="ref17">17</xref>]. The use of wearables to passively monitor physiological signals and the subsequent analysis using various machine learning and deep learning models brings enormous benefits for health management [<xref ref-type="bibr" rid="ref18">18</xref>]. By passively tracking heart rate (HR) or heart rate variability (HRV), skin temperature, electrodermal activity (EDA), electroencephalogram, electrocardiogram (ECG), acceleration, and other physiological variables, smartphones and wearable sensors can provide features related to signs indicative of poor mental health [<xref ref-type="bibr" rid="ref19">19</xref>]. Stress is reflected in the body with increased EDA or HR, reflecting the autonomic nervous system and hypothalamic-pituitary-adrenal axis activity [<xref ref-type="bibr" rid="ref20">20</xref>]. Many studies have tracked these biosignals with commercial digital tools to build models to measure stress [<xref ref-type="bibr" rid="ref21">21</xref>]. In this review, we examine the trends in the current use of these digital tools to measure stress.</p><p>Stress assessment using wearable and digital technologies has been conducted across both controlled laboratory experiments and real-world, free-living conditions. In laboratory settings, studies commonly use well-established stress elicitation tasks [<xref ref-type="bibr" rid="ref22">22</xref>] with resting periods used as baselines. 
Commonly used tasks include the Trier Social Stress Test (TSST), mental arithmetic tasks [<xref ref-type="bibr" rid="ref23">23</xref>] (eg, the Montreal Imaging Stress Task [<xref ref-type="bibr" rid="ref24">24</xref>]), the Stroop color-word test, public speaking, startle response tests, cold pressor tests, and stress-inducing video stimuli [<xref ref-type="bibr" rid="ref25">25</xref>]. Across these studies, researchers used varying combinations of physiological signals and derived diverse feature sets following preprocessing steps such as artifact removal, signal normalization, and feature selection [<xref ref-type="bibr" rid="ref26">26</xref>]. In contrast, stress monitoring in free-living environments relies on self-reported stress measures alongside passive and unobtrusive sensing approaches that capture daily physiological and behavioral patterns using wearable devices and smartphones [<xref ref-type="bibr" rid="ref27">27</xref>]. These approaches vary widely in sensor availability, feature extraction methods, and contextual information, leading to substantial heterogeneity in how stress is represented and quantified across wearables and digital tools.</p><p>Alongside variability in study design, stress capture methods, and physiological sensing, approaches for stress prediction differ markedly across studies. Both traditional machine learning [<xref ref-type="bibr" rid="ref28">28</xref>,<xref ref-type="bibr" rid="ref29">29</xref>] and deep learning [<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref31">31</xref>] models have been applied to physiological time-series data to identify stress episodes and enable just-in-time interventions. 
However, it remains unclear which modeling paradigms are most appropriate for different physiological signals and smartphone-derived active and passive sensing data, how model architectures should be designed to capture temporal stress dynamics, and whether increased model complexity consistently yields performance gains. These methodological challenges hinder the translation of wearable-based stress detection systems into practical tools for continuous monitoring and personalized support in college-aged populations, underscoring the need for systematic evidence synthesis and clearer methodological pathways for future research.</p><p>This review aims to identify trends in current research and highlight areas for improvement that future researchers should focus on. There is a need to understand which algorithms perform best, which wearables are most used, and which signals are most informative. The topic of this review is identifying moments of high stress using digital tools and ubiquitous data in college-aged students. We examine both machine learning and deep learning advancements in the field, as well as comparisons of methods, where a scoping review is the most appropriate synthesis method to address the stated objectives. Our population of interest includes college students aged 18&#x2010;24 years. Publication dates of interest include conference and journal papers published between 2020 and 2025, as we focus on advancements in the field, including newer wearable devices and algorithms. We are also narrowing our focus to college students, as university is a particularly stressful place where their health and lifestyle habits are likely to fluctuate [<xref ref-type="bibr" rid="ref32">32</xref>]. Academic stress is directly linked to health crises such as anxiety and depression, indicating an opportunity to monitor stress and prevent health from deteriorating [<xref ref-type="bibr" rid="ref33">33</xref>]. 
In this scoping review, we summarize the wearables used, signals measured, and algorithms performed to measure stress. We then discuss trends in data and practices across papers. We conduct a quality assessment of all included studies. We also provide an overview of the results and a discussion of limitations and future possibilities for stress measurement. As a result, this scoping review aims to synthesize recent research on wearable or digital tool&#x2013;based stress detection among college-aged students by summarizing the sensing technologies used, the physiological and behavioral signals measured, the machine learning and deep learning models applied, and key methodological practices, to identify current trends, limitations, and directions for future research.</p></sec><sec id="s2" sec-type="methods"><title>Methods</title><sec id="s2-1"><title>Overview</title><p>We conducted a scoping review to characterize current research on stress detection using wearable and digital tools among college-aged students. This review synthesizes studies published between January 2020 and December 2025 to summarize commonly used wearable devices, physiological signals, datasets, and machine learning or deep learning approaches for identifying high-stress moments. By organizing existing methods and conducting a quality assessment, this review provides an overview of methodological practices and highlights areas for future research in wearable-based stress measurement. This scoping review adhered to the methodological framework proposed by Arksey and O&#x2019;Malley [<xref ref-type="bibr" rid="ref34">34</xref>], which includes identifying the research question, identifying relevant studies, study selection, charting the data, and collating, summarizing, and reporting the results. 
Finally, this scoping review was conducted and reported in accordance with the PRISMA-ScR (Preferred Reporting Items for Systematic Reviews and Meta-Analyses extension for Scoping Reviews) guidelines to ensure transparency and reproducibility [<xref ref-type="bibr" rid="ref35">35</xref>].</p></sec><sec id="s2-2"><title>Protocol and Registration</title><p>No formal review protocol was registered for this scoping review, as the objective was to map the scope and characteristics of existing evidence in stress prediction research using wearable technology.</p></sec><sec id="s2-3"><title>Eligibility Criteria</title><p>We defined eligibility criteria to ensure that only relevant and methodologically appropriate studies were included in this review. Studies were included if they measured or classified psychological stress using physiological signals from a tool, wearable, or sensor. Only experimental or observational studies published in English were considered. The target population was college students aged 18&#x2010;24 years. Studies that partially included this age range were eligible if they explicitly mentioned students as a distinct group or if the mean age, along with the SD, fell within the target population. Studies were excluded if they focused on nonpsychological stress (eg, mechanical stress), were review papers, extended abstracts, or prototype descriptions without a defined study population. 
Papers without clear population details or those identifying participants solely by employment (eg, &#x201C;office workers&#x201D; or &#x201C;hospitalized patients&#x201D;) were also excluded.</p></sec><sec id="s2-4"><title>Information Sources</title><p>We searched IEEE Xplore, ACM Digital Library, PubMed, and Embase for conference and journal papers covering studies published between January 2020 and December 2025, a time frame selected to capture recent developments in wearable sensing technologies and stress detection methodologies.</p></sec><sec id="s2-5"><title>Search</title><p>We used a combination of terms related to the key concepts of psychological stress, wearable devices, and sensors (full search per database is provided in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>). We extracted each database searched and the platform used, including IEEE Xplore, ACM Digital Library, PubMed, and Embase, in accordance with PRISMA-S (PRISMA literature search extension) [<xref ref-type="bibr" rid="ref36">36</xref>], and all databases were searched independently rather than through a multidatabase platform. No multidatabase searching or study registry searching was conducted. No additional online resources (eg, tables of contents, print conference proceedings, and websites) were browsed. No additional search methods were used, including citation searching, contacting authors or experts, or setting up citation alerts. The full search strategies for each database are provided in <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>, including the specification that no filters or limits other than language (English) and publication date (January 2020 to December 2025) were applied. Search strategies were developed with input from 2 academic librarians; however, search strategies from prior reviews were not reused, and no formal peer review of the search strategy was conducted. 
No additional methods were used to update the search. Searches were limited to studies published in English within the specified date range. No restrictions were applied based on study design. All retrieved records were initially screened. Following screening, records were imported into Rayyan (Rayyan Systems Inc) [<xref ref-type="bibr" rid="ref37">37</xref>], where duplicate entries were identified and removed. The deduplicated set of records was then used for abstract and full-text screening.</p></sec><sec id="s2-6"><title>Selection of Sources of Evidence</title><p>Two independent reviewers screened all records using a 2-stage selection process. Studies were checked for eligibility by 2 reviewers independently screening titles and abstracts. This first round of filtering focused on relevance. Abstracts were also screened for population. Some papers did not mention population in the abstract and were thus moved to full-text screening. This resulted in 261 papers for full-text screening. During this second round of filtering, studies were also checked for eligibility by 2 researchers independently reviewing the full text. Disagreements at any stage of screening were resolved through discussion, with the 2 reviewers comparing their reasons for inclusion or exclusion. Full agreement was reached for abstract and full-text screening, leading to the final inclusion of 134 papers.</p></sec><sec id="s2-7"><title>Data Charting</title><p>A standardized data-charting form was jointly developed by 2 reviewers to identify and extract relevant information aligned with the review objectives. The form was pilot-tested on a subset of included studies and refined iteratively to ensure completeness and consistency. Two reviewers independently charted data from all eligible studies, compared their entries, and resolved discrepancies through discussion. 
All data were extracted directly from the published papers, and no additional information was sought from study authors.</p></sec><sec id="s2-8"><title>Data Items</title><p>To extract consistent information from each paper, we conducted systematic data extraction as outlined in <xref ref-type="table" rid="table1">Tables 1</xref><xref ref-type="table" rid="table2"/>-<xref ref-type="table" rid="table3">3</xref>. Extracted variables included study details (title, authors, publication date, study purpose, and data collection duration), sample characteristics (age, sex, sample size, and demographic information), sensor type, and all available feature categories used in the study (sleep, physiological signals, calorie intake or expenditure, phone use, activity, location, and survey or EMA data). For studies conducting algorithm comparisons, we additionally extracted the types of signals analyzed, devices used, algorithms tested, performance measures, best-performing algorithm, validation strategy, and outcome measures.</p><table-wrap id="t1" position="float"><label>Table 1.</label><caption><p>Summary characteristics of 134 included studies.</p></caption><table id="table1" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Study</td><td align="left" valign="bottom">Sample (n)</td><td align="left" valign="bottom">Sex</td><td align="left" valign="bottom">Age (years), mean (SD)</td><td align="left" valign="bottom">Sleep</td><td align="left" valign="bottom">Physiological signals</td><td align="left" valign="bottom">Calorie intake or expenditure</td><td align="left" valign="bottom">Phone use</td><td align="left" valign="bottom">Activity</td><td align="left" valign="bottom">Location</td><td align="left" valign="bottom">Survey</td><td align="left" valign="bottom">Total feature types</td></tr></thead><tbody><tr><td align="left" valign="top">Bellante et al [<xref ref-type="bibr" rid="ref38">38</xref>]</td><td align="left" valign="top">15</td><td align="left" 
valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Faro and Giordano [<xref ref-type="bibr" rid="ref39">39</xref>]</td><td align="left" valign="top">&#x2014;<sup><xref ref-type="table-fn" rid="table1fn1">a</xref></sup></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Faro et al [<xref ref-type="bibr" rid="ref40">40</xref>]</td><td align="left" valign="top">31</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Iranfar et al [<xref ref-type="bibr" rid="ref41">41</xref>]</td><td align="left" valign="top">95</td><td align="left" valign="top">95 males</td><td align="left" valign="top">20.43 (2.17)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Mohammadi et al [<xref 
ref-type="bibr" rid="ref42">42</xref>]</td><td align="left" valign="top">18</td><td align="left" valign="top">5 females and 13 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Mustafa et al [<xref ref-type="bibr" rid="ref43">43</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Arsalan and Majid [<xref ref-type="bibr" rid="ref44">44</xref>]</td><td align="left" valign="top">40</td><td align="left" valign="top">20 females and 20 males</td><td align="left" valign="top">24.86 (6.69)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Li and Sano [<xref ref-type="bibr" rid="ref45">45</xref>]</td><td align="left" valign="top">239</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td 
align="left" valign="top">Can et al [<xref ref-type="bibr" rid="ref27">27</xref>]</td><td align="left" valign="top">14</td><td align="left" valign="top">5 females and 9 males</td><td align="left" valign="top">23.5 (N/A)<sup><xref ref-type="table-fn" rid="table1fn1">a</xref></sup></td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Cheadle et al [<xref ref-type="bibr" rid="ref46">46</xref>]</td><td align="left" valign="top">100</td><td align="left" valign="top">61 females and 39 males</td><td align="left" valign="top">20.4 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Chen et al [<xref ref-type="bibr" rid="ref47">47</xref>]</td><td align="left" valign="top">30</td><td align="left" valign="top">20 females and 10 males</td><td align="left" valign="top">23 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Gupta et al [<xref ref-type="bibr" rid="ref48">48</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" 
valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Panganiban and de Leon [<xref ref-type="bibr" rid="ref49">49</xref>]</td><td align="left" valign="top">36</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">21.5 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Gasparini et al [<xref ref-type="bibr" rid="ref50">50</xref>]</td><td align="left" valign="top">36</td><td align="left" valign="top">14 females and 22 males</td><td align="left" valign="top">24.7 (3.3)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Azgomi et al [<xref ref-type="bibr" rid="ref51">51</xref>]</td><td align="left" valign="top">20</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Yu and Sano [<xref ref-type="bibr" rid="ref31">31</xref>]</td><td align="left" valign="top">243</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" 
valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Han et al [<xref ref-type="bibr" rid="ref52">52</xref>]</td><td align="left" valign="top">17</td><td align="left" valign="top">4 females and 13 males</td><td align="left" valign="top">24 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Wu et al [<xref ref-type="bibr" rid="ref53">53</xref>]</td><td align="left" valign="top">264</td><td align="left" valign="top">113 females and 151 males</td><td align="left" valign="top">22.8 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Jelsma et al [<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">100</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Lai et al [<xref ref-type="bibr" rid="ref55">55</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td 
align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Liakopoulos et al [<xref ref-type="bibr" rid="ref56">56</xref>]</td><td align="left" valign="top">Multiple datasets</td><td align="left" valign="top">Multiple datasets</td><td align="left" valign="top">Multiple datasets</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Li and Sano [<xref ref-type="bibr" rid="ref57">57</xref>]</td><td align="left" valign="top">239</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Hssayeni and Ghoraani [<xref ref-type="bibr" rid="ref58">58</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Gil-Martin et al [<xref ref-type="bibr" rid="ref59">59</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" 
valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Han et al [<xref ref-type="bibr" rid="ref60">60</xref>]</td><td align="left" valign="top">20</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Mishra et al [<xref ref-type="bibr" rid="ref61">61</xref>]</td><td align="left" valign="top">27</td><td align="left" valign="top">15 females and 12 males</td><td align="left" valign="top">23 (3.24)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Mishra et al [<xref ref-type="bibr" rid="ref26">26</xref>]</td><td align="left" valign="top">90</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">Graduate and undergraduate students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Momeni et al [<xref ref-type="bibr" rid="ref62">62</xref>]</td><td align="left" valign="top">60</td><td align="left" valign="top">60 males</td><td 
align="left" valign="top">20.43 (2.17)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Rashid et al [<xref ref-type="bibr" rid="ref63">63</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Bobade and Vani [<xref ref-type="bibr" rid="ref18">18</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Yannam et al [<xref ref-type="bibr" rid="ref64">64</xref>]</td><td align="left" valign="top">70</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">Undergraduate</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top">5</td></tr><tr><td align="left" valign="top">Pakhomov et al [<xref ref-type="bibr" rid="ref65">65</xref>]</td><td align="left" 
valign="top">18</td><td align="left" valign="top">14 females and 4 males</td><td align="left" valign="top">20.1 (2.01)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Holder et al [<xref ref-type="bibr" rid="ref66">66</xref>]</td><td align="left" valign="top">11</td><td align="left" valign="top">10 females and 1 male</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Elzeiny and Qaraqe [<xref ref-type="bibr" rid="ref67">67</xref>]</td><td align="left" valign="top">22</td><td align="left" valign="top">5 females and 17 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Heo et al [<xref ref-type="bibr" rid="ref68">68</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Kar et al [<xref 
ref-type="bibr" rid="ref69">69</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Prashant et al [<xref ref-type="bibr" rid="ref70">70</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Samyoun et al [<xref ref-type="bibr" rid="ref71">71</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">3 females and 12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Silva et al [<xref ref-type="bibr" rid="ref72">72</xref>]</td><td align="left" valign="top">82</td><td align="left" valign="top">63 females and 19 males</td><td align="left" valign="top">22.13 (5.55)</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" 
valign="top">3</td></tr><tr><td align="left" valign="top">Islam et al [<xref ref-type="bibr" rid="ref73">73</xref>]</td><td align="left" valign="top">20</td><td align="left" valign="top">7 females, 12 males, and 1 nonbinary</td><td align="left" valign="top">22 (N/A)</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top">4</td></tr><tr><td align="left" valign="top">Vidal et al [<xref ref-type="bibr" rid="ref32">32</xref>]</td><td align="left" valign="top">49</td><td align="left" valign="top">25 females and 24 males</td><td align="left" valign="top">18.1 (N/A)</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Wu et al [<xref ref-type="bibr" rid="ref74">74</xref>]</td><td align="left" valign="top">169</td><td align="left" valign="top">81 females and 88 males</td><td align="left" valign="top">22.8 (6.2)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Mitro et al [<xref ref-type="bibr" rid="ref75">75</xref>]</td><td align="left" valign="top">30</td><td align="left" valign="top">22 males and 8 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" 
valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Zhu et al [<xref ref-type="bibr" rid="ref28">28</xref>]</td><td align="left" valign="top">112</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Tutunji et al [<xref ref-type="bibr" rid="ref76">76</xref>]</td><td align="left" valign="top">84</td><td align="left" valign="top">32 males and 52 females</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">5</td></tr><tr><td align="left" valign="top">Lange et al [<xref ref-type="bibr" rid="ref77">77</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">4</td></tr><tr><td align="left" valign="top">Abdul et al [<xref ref-type="bibr" rid="ref78">78</xref>]</td><td align="left" valign="top">20</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" 
valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Almadhor et al [<xref ref-type="bibr" rid="ref79">79</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Vos et al [<xref ref-type="bibr" rid="ref29">29</xref>]</td><td align="left" valign="top">136</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">13</td></tr><tr><td align="left" valign="top">Mai and Chung [<xref ref-type="bibr" rid="ref80">80</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">30 (7)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Sepanloo et al [<xref ref-type="bibr" rid="ref81">81</xref>]</td><td align="left" valign="top">12</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">29.6 (10.1)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" 
valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Gedam et al [<xref ref-type="bibr" rid="ref2">2</xref>]</td><td align="left" valign="top">200</td><td align="left" valign="top">128 males and 72 females</td><td align="left" valign="top">23 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Darwish et al [<xref ref-type="bibr" rid="ref82">82</xref>]</td><td align="left" valign="top">1017</td><td align="left" valign="top">496 males and 454 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Lim et al [<xref ref-type="bibr" rid="ref83">83</xref>]</td><td align="left" valign="top">5</td><td align="left" valign="top">4 males and 1 female</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Bloomfield et al [<xref ref-type="bibr" rid="ref3">3</xref>]</td><td align="left" valign="top">525</td><td align="left" valign="top">144 males and 381 females</td><td align="left" valign="top">22 (N/A)</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td 
align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Nazeer et al [<xref ref-type="bibr" rid="ref84">84</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Almadhor et al [<xref ref-type="bibr" rid="ref85">85</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Str&#x017E;inar et al [<xref ref-type="bibr" rid="ref86">86</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Chen and Lee [<xref ref-type="bibr" rid="ref30">30</xref>]</td><td align="left" valign="top">30</td><td align="left" valign="top">6 males and 24 females</td><td align="left" valign="top">20.4 (N/A)</td><td align="left" valign="top"/><td align="left" 
valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Feng et al [<xref ref-type="bibr" rid="ref87">87</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Xuanzhi et al [<xref ref-type="bibr" rid="ref88">88</xref>]</td><td align="left" valign="top">15+</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Vidal et al [<xref ref-type="bibr" rid="ref89">89</xref>]</td><td align="left" valign="top">55</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">18.5 (N/A)</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Fauzi et al [<xref ref-type="bibr" rid="ref90">90</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" 
valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">4</td></tr><tr><td align="left" valign="top">Tazarv et al [<xref ref-type="bibr" rid="ref91">91</xref>]</td><td align="left" valign="top">20</td><td align="left" valign="top">13 males and 7 females</td><td align="left" valign="top">25 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">4</td></tr><tr><td align="left" valign="top">Alfredo et al [<xref ref-type="bibr" rid="ref92">92</xref>]</td><td align="left" valign="top">35</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">4</td></tr><tr><td align="left" valign="top">Su et al [<xref ref-type="bibr" rid="ref93">93</xref>]</td><td align="left" valign="top">18403</td><td align="left" valign="top">8565 males and 9838 females</td><td align="left" valign="top">118.5 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Wang et al [<xref ref-type="bibr" rid="ref94">94</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td 
align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Can and Andr&#x00E9; [<xref ref-type="bibr" rid="ref95">95</xref>]</td><td align="left" valign="top">14</td><td align="left" valign="top">9 males and 5 females</td><td align="left" valign="top">23 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Prajod et al [<xref ref-type="bibr" rid="ref96">96</xref>]</td><td align="left" valign="top">135</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">4</td></tr><tr><td align="left" valign="top">Ganesan et al [<xref ref-type="bibr" rid="ref97">97</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">7</td></tr><tr><td align="left" valign="top">Sun et al [<xref ref-type="bibr" rid="ref98">98</xref>]</td><td align="left" valign="top">21</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">23 (2.91)</td><td 
align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Neigel et al [<xref ref-type="bibr" rid="ref99">99</xref>]</td><td align="left" valign="top">103</td><td align="left" valign="top">91 males and 12 females</td><td align="left" valign="top">21.8 (1.9)</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">4</td></tr><tr><td align="left" valign="top">Pogliaghi et al [<xref ref-type="bibr" rid="ref100">100</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Jaiswal et al [<xref ref-type="bibr" rid="ref101">101</xref>]</td><td align="left" valign="top">64</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Rashid et al [<xref ref-type="bibr" rid="ref102">102</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 
females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">7</td></tr><tr><td align="left" valign="top">Narwat et al [<xref ref-type="bibr" rid="ref103">103</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Kafkov&#x00E1; et al [<xref ref-type="bibr" rid="ref104">104</xref>]</td><td align="left" valign="top">15+</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Lopez et al [<xref ref-type="bibr" rid="ref105">105</xref>]</td><td align="left" valign="top">166</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">21 (N/A)</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">5</td></tr><tr><td align="left" valign="top">Wilfred et al [<xref ref-type="bibr" rid="ref106">106</xref>]</td><td align="left" 
valign="top">25</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Jaiswal et al [<xref ref-type="bibr" rid="ref107">107</xref>]</td><td align="left" valign="top">60</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Gaitan-Padilla et al [<xref ref-type="bibr" rid="ref108">108</xref>]</td><td align="left" valign="top">12</td><td align="left" valign="top">5 males and 7 females</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Gupta et al [<xref ref-type="bibr" rid="ref109">109</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Beierle and Pryss [<xref ref-type="bibr" rid="ref110">110</xref>]</td><td 
align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">4</td></tr><tr><td align="left" valign="top">Masrur et al [<xref ref-type="bibr" rid="ref111">111</xref>]</td><td align="left" valign="top">15+</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Sakanti et al [<xref ref-type="bibr" rid="ref112">112</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Shedage et al [<xref ref-type="bibr" rid="ref113">113</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">7</td></tr><tr><td align="left" valign="top">Gaitan-Padilla et al [<xref 
ref-type="bibr" rid="ref114">114</xref>]</td><td align="left" valign="top">5</td><td align="left" valign="top">4 males and 1 female</td><td align="left" valign="top">22.6 (0.55)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Tanwar et al [<xref ref-type="bibr" rid="ref115">115</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Gullapalli et al [<xref ref-type="bibr" rid="ref116">116</xref>]</td><td align="left" valign="top">18</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">20 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Sadruddin et al [<xref ref-type="bibr" rid="ref117">117</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" 
valign="top">Jahanjoo et al [<xref ref-type="bibr" rid="ref118">118</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Parousidou et al [<xref ref-type="bibr" rid="ref119">119</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Karpagam et al [<xref ref-type="bibr" rid="ref120">120</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Sethia et al [<xref ref-type="bibr" rid="ref121">121</xref>]</td><td align="left" valign="top">36</td><td align="left" valign="top">32 males and 4 females</td><td align="left" valign="top">21 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" 
valign="top">4</td></tr><tr><td align="left" valign="top">Hasanpoor et al [<xref ref-type="bibr" rid="ref122">122</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Benita et al [<xref ref-type="bibr" rid="ref123">123</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Hsu [<xref ref-type="bibr" rid="ref124">124</xref>]</td><td align="left" valign="top">10</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Carmisciano et al [<xref ref-type="bibr" rid="ref125">125</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" 
valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Warrier et al [<xref ref-type="bibr" rid="ref126">126</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">5</td></tr><tr><td align="left" valign="top">Calbert and Tonekaboni [<xref ref-type="bibr" rid="ref127">127</xref>]</td><td align="left" valign="top">5</td><td align="left" valign="top">2 males and 3 females</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">4</td></tr><tr><td align="left" valign="top">Hoang et al [<xref ref-type="bibr" rid="ref1">1</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Kumar et al [<xref ref-type="bibr" rid="ref128">128</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td 
align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Hasanpoor et al [<xref ref-type="bibr" rid="ref129">129</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Le et al [<xref ref-type="bibr" rid="ref130">130</xref>]</td><td align="left" valign="top">10</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Fernandez et al [<xref ref-type="bibr" rid="ref131">131</xref>]</td><td align="left" valign="top">30</td><td align="left" valign="top">15 males and 15 females</td><td align="left" valign="top">28 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Tanwar et al [<xref ref-type="bibr" rid="ref132">132</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td 
align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Huang et al [<xref ref-type="bibr" rid="ref133">133</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Oh et al [<xref ref-type="bibr" rid="ref134">134</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Thapa et al [<xref ref-type="bibr" rid="ref135">135</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Abdelfattah et al [<xref ref-type="bibr" rid="ref136">136</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td 
align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Tsiampa et al [<xref ref-type="bibr" rid="ref137">137</xref>]</td><td align="left" valign="top"/><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Fazeli et al [<xref ref-type="bibr" rid="ref138">138</xref>]</td><td align="left" valign="top">14</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">8</td></tr><tr><td align="left" valign="top">Subathra and Malarvizhi [<xref ref-type="bibr" rid="ref139">139</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Shikha et al [<xref ref-type="bibr" rid="ref140">140</xref>]</td><td align="left" valign="top">36</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">20 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td 
align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Andreas et al [<xref ref-type="bibr" rid="ref141">141</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Lee et al [<xref ref-type="bibr" rid="ref21">21</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Kasnesis et al [<xref ref-type="bibr" rid="ref142">142</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Ciharova et al [<xref ref-type="bibr" rid="ref143">143</xref>]</td><td align="left" valign="top">42</td><td align="left" valign="top">13 males and 29 females</td><td align="left" valign="top">20.79 (N/A)</td><td align="left" valign="top"/><td 
align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Darwish et al [<xref ref-type="bibr" rid="ref144">144</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Nuamah [<xref ref-type="bibr" rid="ref145">145</xref>]</td><td align="left" valign="top">32</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">25.2 (2.3)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Saylam and &#x0130;ncel [<xref ref-type="bibr" rid="ref19">19</xref>]</td><td align="left" valign="top">700</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">4</td></tr><tr><td align="left" valign="top">Sa-nguannarm et al [<xref ref-type="bibr" rid="ref146">146</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 
(2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">6</td></tr><tr><td align="left" valign="top">Nelson et al [<xref ref-type="bibr" rid="ref147">147</xref>]</td><td align="left" valign="top">103</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Dahal et al [<xref ref-type="bibr" rid="ref148">148</xref>]</td><td align="left" valign="top">15</td><td align="left" valign="top">12 males and 3 females</td><td align="left" valign="top">27.5 (2.4)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Aqajari et al [<xref ref-type="bibr" rid="ref149">149</xref>]</td><td align="left" valign="top">11</td><td align="left" valign="top">4 males and 7 females</td><td align="left" valign="top">22.91 (5.05)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Jiao et al [<xref ref-type="bibr" rid="ref150">150</xref>]</td><td align="left" valign="top">32</td><td align="left" valign="top">14 males and 
18 females</td><td align="left" valign="top">22.69 (3.73)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Yuting and Rashid [<xref ref-type="bibr" rid="ref33">33</xref>]</td><td align="left" valign="top">502</td><td align="left" valign="top">476 males and 26 females</td><td align="left" valign="top">College students</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Lotfi et al [<xref ref-type="bibr" rid="ref151">151</xref>]</td><td align="left" valign="top">168</td><td align="left" valign="top">168 females</td><td align="left" valign="top">22.5 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Belwafi et al [<xref ref-type="bibr" rid="ref23">23</xref>]</td><td align="left" valign="top">36</td><td align="left" valign="top">8 males and 28 females</td><td align="left" valign="top">21 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1</td></tr><tr><td align="left" valign="top">Patan&#x00E8; et al [<xref ref-type="bibr" rid="ref152">152</xref>]</td><td
align="left" valign="top">16</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">College students</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Subathra et al [<xref ref-type="bibr" rid="ref153">153</xref>]</td><td align="left" valign="top">46</td><td align="left" valign="top">40 males and 6 females</td><td align="left" valign="top">22 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">2</td></tr><tr><td align="left" valign="top">Li et al [<xref ref-type="bibr" rid="ref25">25</xref>]</td><td align="left" valign="top">177</td><td align="left" valign="top">89 males and 88 females</td><td align="left" valign="top">20.37 (2.97)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">3</td></tr><tr><td align="left" valign="top">Van der Mee et al [<xref ref-type="bibr" rid="ref154">154</xref>]</td><td align="left" valign="top">95</td><td align="left" valign="top">15 males and 80 females</td><td align="left" valign="top">20 (N/A)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top">2</td></tr><tr><td align="left" 
valign="top">Rosenbach et al [<xref ref-type="bibr" rid="ref24">24</xref>]</td><td align="left" valign="top">60</td><td align="left" valign="top">20 males and 40 females</td><td align="left" valign="top">27.5 (5.6)</td><td align="left" valign="top"/><td align="left" valign="top">&#x2714;</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">3</td></tr></tbody></table><table-wrap-foot><fn id="table1fn1"><p><sup>a</sup>Not available.</p></fn></table-wrap-foot></table-wrap><table-wrap id="t2" position="float"><label>Table 2.</label><caption><p>Details for studies conducting algorithm comparisons.</p></caption><table id="table2" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Study</td><td align="left" valign="bottom">Device used</td><td align="left" valign="bottom">Physiological or nonphysiological signals</td><td align="left" valign="bottom">Algorithm</td><td align="left" valign="bottom">Performance measure</td><td align="left" valign="bottom">Best performing algorithm</td><td align="left" valign="bottom">Validation</td></tr></thead><tbody><tr><td align="left" valign="top">Bellante et al [<xref ref-type="bibr" rid="ref38">38</xref>]</td><td align="left" valign="top">Wrist and chest devices</td><td align="left" valign="top">BVP<sup><xref ref-type="table-fn" rid="table2fn1">a</xref></sup>, EDA<sup><xref ref-type="table-fn" rid="table2fn2">b</xref></sup>, and ESP<sup><xref ref-type="table-fn" rid="table2fn3">c</xref></sup></td><td align="left" valign="top">DT<sup><xref ref-type="table-fn" rid="table2fn4">d</xref></sup>, bagging DT, RF<sup><xref ref-type="table-fn" rid="table2fn5">e</xref></sup>, Extra Trees, AdaBoost<sup><xref ref-type="table-fn" rid="table2fn6">f</xref></sup> DT, SVM<sup><xref ref-type="table-fn" rid="table2fn7">g</xref></sup>, KNN<sup><xref ref-type="table-fn" rid="table2fn8">h</xref></sup>, 
LR<sup><xref ref-type="table-fn" rid="table2fn9">i</xref></sup>, and LDA<sup><xref ref-type="table-fn" rid="table2fn10">j</xref></sup></td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">SVM</td><td align="left" valign="top">Leave-one-out cross-validation (LOOCV)</td></tr><tr><td align="left" valign="top">Iranfar et al [<xref ref-type="bibr" rid="ref41">41</xref>]</td><td align="left" valign="top">Biopac BioNomadix System</td><td align="left" valign="top">EDA, RESP<sup><xref ref-type="table-fn" rid="table2fn11">k</xref></sup>, ECG<sup><xref ref-type="table-fn" rid="table2fn12">l</xref></sup>, and PPG<sup><xref ref-type="table-fn" rid="table2fn13">m</xref></sup></td><td align="left" valign="top">LDA, SVM, RF, XGBoost<sup><xref ref-type="table-fn" rid="table2fn14">n</xref></sup>, Isolation forest, and Bayesian ridge algorithm</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">XGBoost</td><td align="left" valign="top">Group k-fold cross-validation (k=10)</td></tr><tr><td align="left" valign="top">Mohammadi et al [<xref ref-type="bibr" rid="ref42">42</xref>]</td><td align="left" valign="top">&#x2014;<sup><xref ref-type="table-fn" rid="table2fn15">o</xref></sup></td><td align="left" valign="top">ECG and EDA</td><td align="left" valign="top">KNN, DT, RF, SVM, and FCM<sup><xref ref-type="table-fn" rid="table2fn16">p</xref></sup></td><td align="left" valign="top">Accuracy, sensitivity, and specificity</td><td align="left" valign="top">KNN</td><td align="left" valign="top">Train and test split</td></tr><tr><td align="left" valign="top">Mustafa et al [<xref ref-type="bibr" rid="ref43">43</xref>]</td><td align="left" valign="top">SA9309M, AD8232, and MAX30205</td><td align="left" valign="top">HR<sup><xref ref-type="table-fn" rid="table2fn17">q</xref></sup>, SC<sup><xref ref-type="table-fn" rid="table2fn18">r</xref></sup>, and TEMP<sup><xref ref-type="table-fn" 
rid="table2fn19">s</xref></sup></td><td align="left" valign="top">ANN<sup><xref ref-type="table-fn" rid="table2fn20">t</xref></sup>, KNN, DT, and SVM</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">DT</td><td align="left" valign="top">Train and test split</td></tr><tr><td align="left" valign="top">Arsalan and Majid [<xref ref-type="bibr" rid="ref44">44</xref>]</td><td align="left" valign="top">MUSE EEG<sup><xref ref-type="table-fn" rid="table2fn21">u</xref></sup>, Shimmer GSR<sup><xref ref-type="table-fn" rid="table2fn22">v</xref></sup>, and PPG optical pulse clip</td><td align="left" valign="top">EEG, GSR, and PPG</td><td align="left" valign="top">KNN, DT, RF, MLP<sup><xref ref-type="table-fn" rid="table2fn23">w</xref></sup>, and SVM</td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">SVM</td><td align="left" valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Can et al [<xref ref-type="bibr" rid="ref27">27</xref>]</td><td align="left" valign="top">Smartwatch and Empatica E4</td><td align="left" valign="top">EDA and HR</td><td align="left" valign="top">MLP, RF (n=100), KNN (n=3), SVM, and LR</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">RF and SVM</td><td align="left" valign="top">10-fold CV<sup><xref ref-type="table-fn" rid="table2fn24">x</xref></sup></td></tr><tr><td align="left" valign="top">Panganiban and de Leon [<xref ref-type="bibr" rid="ref49">49</xref>]</td><td align="left" valign="top">Smartphone and CorSense</td><td align="left" valign="top">PRV<sup><xref ref-type="table-fn" rid="table2fn25">y</xref></sup> from PPG</td><td align="left" valign="top">KNN, NN<sup><xref ref-type="table-fn" rid="table2fn26">z</xref></sup>, SVM, RF, and AdaBoost</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">RF</td><td align="left" valign="top">Stratified k-fold CV</td></tr><tr><td align="left" 
valign="top">Gasparini et al [<xref ref-type="bibr" rid="ref50">50</xref>]</td><td align="left" valign="top">Shimmer3 GSR</td><td align="left" valign="top">BVP</td><td align="left" valign="top">SVM linear kernel and CNN<sup><xref ref-type="table-fn" rid="table2fn27">aa</xref></sup></td><td align="left" valign="top">Accuracy, precision, recall, and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">CNN</td><td align="left" valign="top">Train and test split</td></tr><tr><td align="left" valign="top">Yu and Sano [<xref ref-type="bibr" rid="ref31">31</xref>]</td><td align="left" valign="top">Wrist device and Android phone data</td><td align="left" valign="top">ACC<sup><xref ref-type="table-fn" rid="table2fn28">ab</xref></sup>, SC, and TEMP</td><td align="left" valign="top">LSTM<sup><xref ref-type="table-fn" rid="table2fn29">ac</xref></sup>, combination of LSTM and CNN</td><td align="left" valign="top">MAE<sup><xref ref-type="table-fn" rid="table2fn30">ad</xref></sup> and statistical analyses</td><td align="left" valign="top">LSTM</td><td align="left" valign="top">5-fold CV</td></tr><tr><td align="left" valign="top">Han et al [<xref ref-type="bibr" rid="ref52">52</xref>]</td><td align="left" valign="top">Shimmer3 ECG, Shimmer 3 GSR+, and Empatica E4</td><td align="left" valign="top">ECG, PPG, and GSR</td><td align="left" valign="top">KNN (k=1, 3, 5, 7, and 9), SVM, and Na&#x00EF;ve Bayes classifier</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">SVM</td><td align="left" valign="top">10-fold CV</td></tr><tr><td align="left" valign="top">Liakopoulos et al [<xref ref-type="bibr" rid="ref56">56</xref>]</td><td align="left" valign="top">Body sensors, wrist, and chest devices</td><td align="left" valign="top">ECG, EDA, and HR</td><td align="left" valign="top">CNN, SVM, KNN, RF, and NN</td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">SVM</td><td align="left" 
valign="top">10-fold and LOSO<sup><xref ref-type="table-fn" rid="table2fn32">af</xref></sup> CV</td></tr><tr><td align="left" valign="top">Hssayeni and Ghoraani [<xref ref-type="bibr" rid="ref58">58</xref>]</td><td align="left" valign="top">Wrist and chest devices</td><td align="left" valign="top">RESP, ECG, EMA<sup><xref ref-type="table-fn" rid="table2fn33">ag</xref></sup>, EDA, TEMP, and ACC</td><td align="left" valign="top">Gradient tree boosting and CNN</td><td align="left" valign="top">MAE and r</td><td align="left" valign="top">CNN</td><td align="left" valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Mishra et al [<xref ref-type="bibr" rid="ref61">61</xref>]</td><td align="left" valign="top">Polar H7, Amulet wrist, and custom-made GSR sensor</td><td align="left" valign="top">HR, activity data, EMA prompts, and GSR</td><td align="left" valign="top">SVM and RF</td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">SVM</td><td align="left" valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Mishra et al [<xref ref-type="bibr" rid="ref26">26</xref>]</td><td align="left" valign="top">Polar H10, Polar H7, and Empatica E4</td><td align="left" valign="top">HR and EDA</td><td align="left" valign="top">SVM and RF</td><td align="left" valign="top">Precision, recall, and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">SVM with HR, RF for HR and EDA</td><td align="left" valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Bobade and Vani [<xref ref-type="bibr" rid="ref18">18</xref>]</td><td align="left" valign="top">Wrist and chest devices</td><td align="left" valign="top">ACC, ECG, BVP, TEMP, RESP, EMG<sup><xref ref-type="table-fn" rid="table2fn34">ah</xref></sup>, and EDA</td><td align="left" valign="top">KNN, LDA, RF, DT, AdaBoost, Kernel SVM, and ANN</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">ANN</td><td align="left" 
valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Elzeiny and Qaraqe [<xref ref-type="bibr" rid="ref67">67</xref>]</td><td align="left" valign="top">PPG sensor and Empatica E4</td><td align="left" valign="top">IBI<sup><xref ref-type="table-fn" rid="table2fn35">ai</xref></sup> and BVP</td><td align="left" valign="top">CNN, RF, Extra Trees, extremely randomized trees, and SVM</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">CNN and Extra Trees</td><td align="left" valign="top">CNN: 5-fold cross validation and ML:<sup><xref ref-type="table-fn" rid="table2fn36">aj</xref></sup> 10-fold cross validation</td></tr><tr><td align="left" valign="top">Prashant et al [<xref ref-type="bibr" rid="ref70">70</xref>]</td><td align="left" valign="top">Wrist and chest devices</td><td align="left" valign="top">ECG</td><td align="left" valign="top">LDA, RF (100 base estimators), SVM (Gaussian kernel), and ANN</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">RF</td><td align="left" valign="top">Train and test split</td></tr><tr><td align="left" valign="top">Silva et al [<xref ref-type="bibr" rid="ref72">72</xref>]</td><td align="left" valign="top">Microsoft Smartband 2</td><td align="left" valign="top">HR, SC, TEMP, calorie intake and expenditure, and sleep patterns</td><td align="left" valign="top">Logistic regression, NN, Na&#x00EF;ve Bayes, SVM, RF, and KNN</td><td align="left" valign="top">Sensitivity and specificity</td><td align="left" valign="top">NN</td><td align="left" valign="top">Train and test split</td></tr><tr><td align="left" valign="top">Islam et al [<xref ref-type="bibr" rid="ref73">73</xref>]</td><td align="left" valign="top">Fitbit Charge 2 and Android</td><td align="left" valign="top">HR, sleep, step count, GPS location, sound intensity, and light data</td><td align="left" valign="top">LR, KNN, SVM, and NN</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">SVM</td><td 
align="left" valign="top">10-fold CV</td></tr><tr><td align="left" valign="top">Zhu et al [<xref ref-type="bibr" rid="ref28">28</xref>]</td><td align="left" valign="top">Empatica E4, Affectiva Q Curve, and Shimmer3</td><td align="left" valign="top">EDA, PPG, and ECG</td><td align="left" valign="top">SVM, RF, KNN, Na&#x00EF;ve Bayes, and LR</td><td align="left" valign="top">Accuracy, recall, precision, and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">SVM</td><td align="left" valign="top">LOSO and 10-fold CV</td></tr><tr><td align="left" valign="top">Sepanloo et al [<xref ref-type="bibr" rid="ref81">81</xref>]</td><td align="left" valign="top">Empatica E4 and Zephyr BioHarness 3 chest straps</td><td align="left" valign="top">HR, EDA, and TEMP</td><td align="left" valign="top">RF, gradient boosting classifier, and stacking models</td><td align="left" valign="top">Accuracy, precision, recall, <italic>F</italic><sub>1</sub>-score, and support</td><td align="left" valign="top">Stacking models</td><td align="left" valign="top">Stratified 5-fold CV</td></tr><tr><td align="left" valign="top">Gedam et al [<xref ref-type="bibr" rid="ref2">2</xref>]</td><td align="left" valign="top">Empatica E4 and RespiBAN</td><td align="left" valign="top">ECG, GSR, and TEMP</td><td align="left" valign="top">KNN, SVM, DT, RF, AdaBoost, XGBoost<sup><xref ref-type="table-fn" rid="table2fn14">n</xref></sup>, and gradient boosting</td><td align="left" valign="top">Accuracy, precision, recall, <italic>F</italic><sub>1</sub>-score, and AUC<sup><xref ref-type="table-fn" rid="table2fn37">ak</xref></sup></td><td align="left" valign="top">XGBoost</td><td align="left" valign="top">Train and test split and 10-fold CV</td></tr><tr><td align="left" valign="top">Alfredo et al [<xref ref-type="bibr" rid="ref92">92</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">TEMP, EDA, BVP, and salivary cortisol</td><td align="left" valign="top">SVM, 
AdaBoost, RF, LDA, and KNN</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">RF and KNN</td><td align="left" valign="top">Train and test split</td></tr><tr><td align="left" valign="top">Su et al [<xref ref-type="bibr" rid="ref93">93</xref>]</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">Self-reports (PSQI<sup><xref ref-type="table-fn" rid="table2fn38">al</xref></sup>, DASS-21<sup><xref ref-type="table-fn" rid="table2fn39">am</xref></sup>, CD-RISC<sup><xref ref-type="table-fn" rid="table2fn40">an</xref></sup>, and IPAQ)<sup><xref ref-type="table-fn" rid="table2fn41">ao</xref></sup></td><td align="left" valign="top">RF, LR, SVM, and FNN<sup><xref ref-type="table-fn" rid="table2fn42">ap</xref></sup></td><td align="left" valign="top">Accuracy, specificity, and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">RF</td><td align="left" valign="top">Train and test split</td></tr><tr><td align="left" valign="top">Wang et al [<xref ref-type="bibr" rid="ref94">94</xref>]</td><td align="left" valign="top">Empatica E4 and RespiBAN</td><td align="left" valign="top">HRV<sup><xref ref-type="table-fn" rid="table2fn31">ae</xref></sup></td><td align="left" valign="top">SVM and KNN</td><td align="left" valign="top">Accuracy, <italic>F</italic><sub>1</sub>-score, recall, and precision</td><td align="left" valign="top">SVM</td><td align="left" valign="top">10-fold CV</td></tr><tr><td align="left" valign="top">Prajod et al [<xref ref-type="bibr" rid="ref96">96</xref>]</td><td align="left" valign="top">RespiBAN, Empatica E4, TMSI Mobi, IOM biofeedback device, and Actiwave Cardio Monitor</td><td align="left" valign="top">ECG, EDA, BVP, and TEMP</td><td align="left" valign="top">RF, SVM, and MLP</td><td align="left" valign="top"><italic>F</italic><sub>1</sub>-score and accuracy</td><td align="left" valign="top">RF</td><td align="left" valign="top">LOSO</td></tr><tr><td align="left" valign="top">Narwat et al [<xref 
ref-type="bibr" rid="ref103">103</xref>]</td><td align="left" valign="top">RespiBAN</td><td align="left" valign="top">EDA, ECG, and TEMP</td><td align="left" valign="top">CNN, KNN, and XGBoost</td><td align="left" valign="top">Accuracy, precision, recall, <italic>F</italic><sub>1</sub>-score, and support</td><td align="left" valign="top">CNN</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Sadruddin et al [<xref ref-type="bibr" rid="ref117">117</xref>]</td><td align="left" valign="top">Empatica E4 and RespiBAN</td><td align="left" valign="top">ECG, EDA, EMG, ACC, TEMP, and RESP</td><td align="left" valign="top">DT, XGBoost, LR, and LDA</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">XGBoost</td><td align="left" valign="top">10-fold CV</td></tr><tr><td align="left" valign="top">Jahanjoo et al [<xref ref-type="bibr" rid="ref118">118</xref>]</td><td align="left" valign="top">Empatica E4 and RespiBAN</td><td align="left" valign="top">PPG</td><td align="left" valign="top">KNN, LDA, SVM, DT, RF, and AdaBoost</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">SVM</td><td align="left" valign="top">CV</td></tr><tr><td align="left" valign="top">Karpagam et al [<xref ref-type="bibr" rid="ref120">120</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">ACC, EDA, and TEMP</td><td align="left" valign="top">RF and LR</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">RF</td><td align="left" valign="top">10-fold CV</td></tr><tr><td align="left" valign="top">Hsu [<xref ref-type="bibr" rid="ref124">124</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">EDA</td><td align="left" valign="top">LDA, SVM, and KNN</td><td align="left" valign="top">Precision, recall, <italic>F</italic><sub>1</sub>-score, and accuracy</td><td align="left" valign="top">SVM</td><td align="left" valign="top">Train and test 
split</td></tr><tr><td align="left" valign="top">Calbert and Tonekaboni [<xref ref-type="bibr" rid="ref127">127</xref>]</td><td align="left" valign="top">Hexoskin vests and Actigraph watches</td><td align="left" valign="top">HR, RESP, breathing volume, and movement</td><td align="left" valign="top">RF, KNN, XGBoost, and NN</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">RF</td><td align="left" valign="top">LOSO</td></tr><tr><td align="left" valign="top">Le et al [<xref ref-type="bibr" rid="ref130">130</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">HR, EDA, and TEMP</td><td align="left" valign="top">SVM and KNN</td><td align="left" valign="top"><italic>F</italic><sub>1</sub>-score and accuracy</td><td align="left" valign="top">KNN</td><td align="left" valign="top">10-fold CV</td></tr><tr><td align="left" valign="top">Fernandez et al [<xref ref-type="bibr" rid="ref131">131</xref>]</td><td align="left" valign="top">EEG Enobio device and the BIOPAC MP36</td><td align="left" valign="top">EEG</td><td align="left" valign="top">LightGBM<sup><xref ref-type="table-fn" rid="table2fn43">aq</xref></sup>, CNN, KNN, and SVM</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">LightGBM</td><td align="left" valign="top">Train and test split and 5-fold CV</td></tr><tr><td align="left" valign="top">Shikha et al [<xref ref-type="bibr" rid="ref140">140</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">EDA, PPG, and ACC</td><td align="left" valign="top">Gradient Boosting, SVM, KNN, RF, and EBM<sup><xref ref-type="table-fn" rid="table2fn44">ar</xref></sup></td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">Gradient boosting</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Aqajari et al [<xref ref-type="bibr" rid="ref149">149</xref>]</td><td align="left" valign="top">Samsung Galaxy Gear Sport 
watches</td><td align="left" valign="top">PPG</td><td align="left" valign="top">KNN, RF, and XGBoost</td><td align="left" valign="top"><italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">RF</td><td align="left" valign="top">5-fold CV</td></tr></tbody></table><table-wrap-foot><fn id="table2fn1"><p><sup>a</sup>BVP: blood volume pulse.</p></fn><fn id="table2fn2"><p><sup>b</sup>EDA: electrodermal activity.</p></fn><fn id="table2fn3"><p><sup>c</sup>ESP: echo squeezing protocol. </p></fn><fn id="table2fn4"><p><sup>d</sup>DT: decision tree.</p></fn><fn id="table2fn5"><p><sup>e</sup>RF: random forest.</p></fn><fn id="table2fn6"><p><sup>f</sup>AdaBoost: adaptive boosting.</p></fn><fn id="table2fn7"><p><sup>g</sup>SVM: support vector machine.</p></fn><fn id="table2fn8"><p><sup>h</sup>KNN: k-nearest neighbor.</p></fn><fn id="table2fn9"><p><sup>i</sup>LR: logistic regression.</p></fn><fn id="table2fn10"><p><sup>j</sup>LDA: linear discriminant analysis.</p></fn><fn id="table2fn11"><p><sup>k</sup>RESP: respiration.</p></fn><fn id="table2fn12"><p><sup>l</sup>ECG: electrocardiogram.</p></fn><fn id="table2fn13"><p><sup>m</sup>PPG: photoplethysmography. 
</p></fn><fn id="table2fn14"><p><sup>n</sup>XGBoost: extreme gradient boosting.</p></fn><fn id="table2fn15"><p><sup>o</sup>Not available.</p></fn><fn id="table2fn16"><p><sup>p</sup>FCM: fuzzy c-means.</p></fn><fn id="table2fn17"><p><sup>q</sup>HR: heart rate.</p></fn><fn id="table2fn18"><p><sup>r</sup>SC: skin conductance.</p></fn><fn id="table2fn19"><p><sup>s</sup>TEMP: temperature.</p></fn><fn id="table2fn20"><p><sup>t</sup>ANN: artificial neural network.</p></fn><fn id="table2fn21"><p><sup>u</sup>EEG: electroencephalogram.</p></fn><fn id="table2fn22"><p><sup>v</sup>GSR: galvanic skin response.</p></fn><fn id="table2fn23"><p><sup>w</sup>MLP: multilayer perceptron.</p></fn><fn id="table2fn24"><p><sup>x</sup>CV: cross-validation.</p></fn><fn id="table2fn25"><p><sup>y</sup>PRV: pulse rate variability.</p></fn><fn id="table2fn26"><p><sup>z</sup>NN: neural network.</p></fn><fn id="table2fn27"><p><sup>aa</sup>CNN: convolutional neural network.</p></fn><fn id="table2fn28"><p><sup>ab</sup>ACC: accelerometer.</p></fn><fn id="table2fn29"><p><sup>ac</sup>LSTM: long short-term memory.</p></fn><fn id="table2fn30"><p><sup>ad</sup>MAE: mean absolute error.</p></fn><fn id="table2fn31"><p><sup>ae</sup>HRV: heart rate variability.</p></fn><fn id="table2fn32"><p><sup>af</sup>LOSO: leave-one-subject-out.</p></fn><fn id="table2fn33"><p><sup>ag</sup>EMA: ecological momentary assessment.</p></fn><fn id="table2fn34"><p><sup>ah</sup>EMG: electromyography.</p></fn><fn id="table2fn35"><p><sup>ai</sup>IBI: interbeat interval.</p></fn><fn id="table2fn36"><p><sup>aj</sup>ML: machine learning.</p></fn><fn id="table2fn37"><p><sup>ak</sup>AUC: area under the receiver operating characteristic curve.</p></fn><fn id="table2fn38"><p><sup>al</sup>PSQI: Pittsburgh Sleep Quality Index.</p></fn><fn id="table2fn39"><p><sup>am</sup>DASS-21: Depression Anxiety Stress Scales&#x2013;21.</p></fn><fn id="table2fn40"><p><sup>an</sup>CD-RISC: Connor&#x2013;Davidson Resilience Scale.</p></fn><fn 
id="table2fn41"><p><sup>ao</sup>IPAQ: International Physical Activity Questionnaire.</p></fn><fn id="table2fn42"><p><sup>ap</sup>FNN: feedforward neural network.</p></fn><fn id="table2fn43"><p><sup>aq</sup>LightGBM: light gradient boosting machine.</p></fn><fn id="table2fn44"><p><sup>ar</sup>EBM: explainable boosting machine.</p></fn></table-wrap-foot></table-wrap><table-wrap id="t3" position="float"><label>Table 3.</label><caption><p>Details for studies testing or comparing their own framework or conducting statistical analyses.</p></caption><table id="table3" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Study</td><td align="left" valign="bottom">Device used</td><td align="left" valign="bottom">Features used</td><td align="left" valign="bottom">Algorithm analysis</td><td align="left" valign="bottom">Performance measure</td><td align="left" valign="bottom">Results</td><td align="left" valign="bottom">Validation</td></tr></thead><tbody><tr><td align="left" valign="top">Faro and Giordano [<xref ref-type="bibr" rid="ref39">39</xref>]</td><td align="left" valign="top">ECG<sup><xref ref-type="table-fn" rid="table3fn1">a</xref></sup> wearable and wearable body sensor network</td><td align="left" valign="top">HR<sup><xref ref-type="table-fn" rid="table3fn2">b</xref></sup>, activity, time, and location</td><td align="left" valign="top">ANN<sup><xref ref-type="table-fn" rid="table3fn3">c</xref></sup> and SOM<sup><xref ref-type="table-fn" rid="table3fn4">d</xref></sup> for proposed framework</td><td align="left" valign="top">Classification tool</td><td align="left" valign="top">Model successful</td><td align="left" valign="top">Train/test split</td></tr><tr><td align="left" valign="top">Faro et al [<xref ref-type="bibr" rid="ref40">40</xref>]</td><td align="left" valign="top">ECG wearable and wearable body sensor network</td><td align="left" valign="top">HR</td><td align="left" valign="top">SOFM<sup><xref ref-type="table-fn" 
rid="table3fn5">e</xref></sup></td><td align="left" valign="top">&#x2014;<sup><xref ref-type="table-fn" rid="table3fn6">f</xref></sup></td><td align="left" valign="top">Defined as accurate enough</td><td align="left" valign="top">Train/test split</td></tr><tr><td align="left" valign="top">Li and Sano [<xref ref-type="bibr" rid="ref45">45</xref>]</td><td align="left" valign="top">Wrist</td><td align="left" valign="top">SC<sup><xref ref-type="table-fn" rid="table3fn7">g</xref></sup>, TEMP<sup><xref ref-type="table-fn" rid="table3fn8">h</xref></sup>, and ACC<sup><xref ref-type="table-fn" rid="table3fn9">i</xref></sup></td><td align="left" valign="top">L2 and 1-norm regularized multitask least squares regression</td><td align="left" valign="top">Mean squared error and MAE<sup><xref ref-type="table-fn" rid="table3fn10">j</xref></sup></td><td align="left" valign="top">Early fusion better</td><td align="left" valign="top">Train/test split</td></tr><tr><td align="left" valign="top">Cheadle et al [<xref ref-type="bibr" rid="ref46">46</xref>]</td><td align="left" valign="top">SAM<sup><xref ref-type="table-fn" rid="table3fn11">k</xref></sup> activity wearable, EDA<sup><xref ref-type="table-fn" rid="table3fn12">l</xref></sup> sensor, and Empatica E4</td><td align="left" valign="top">EDA<sup><xref ref-type="table-fn" rid="table3fn12">l</xref></sup></td><td align="left" valign="top">Linear regression</td><td align="left" valign="top">Statistical correlation</td><td align="left" valign="top">Support prior findings that perceived microaggressive discrimination increases negative emotion</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Chen et al [<xref ref-type="bibr" rid="ref47">47</xref>]</td><td align="left" valign="top">Personalized system and surveys</td><td align="left" valign="top">Survey questions</td><td align="left" valign="top">Proposed framework</td><td align="left" valign="top">MAE</td><td align="left" valign="top">&#x2014;</td><td 
align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Gupta et al [<xref ref-type="bibr" rid="ref48">48</xref>]</td><td align="left" valign="top">RespiBAN and Empatica E4</td><td align="left" valign="top">ECG, EMG<sup><xref ref-type="table-fn" rid="table3fn13">m</xref></sup>, TEMP, RESP<sup><xref ref-type="table-fn" rid="table3fn14">n</xref></sup>, BVP<sup><xref ref-type="table-fn" rid="table3fn15">o</xref></sup>, EDA, and ACC</td><td align="left" valign="top">CNN<sup><xref ref-type="table-fn" rid="table3fn16">p</xref></sup> and k-medoid clustering</td><td align="left" valign="top">Accuracy and execution time</td><td align="left" valign="top">Success</td><td align="left" valign="top">4-fold CV<sup><xref ref-type="table-fn" rid="table3fn17">q</xref></sup></td></tr><tr><td align="left" valign="top">Azgomi et al [<xref ref-type="bibr" rid="ref51">51</xref>]</td><td align="left" valign="top">Affectiva Q Curve and Nonin Wireless WristOx2 oximeter</td><td align="left" valign="top">SC, TEMP, ACC, HR, and blood oxygenation</td><td align="left" valign="top">Bayesian filtering with an expectation maximization (EM)</td><td align="left" valign="top"><italic>t</italic> test comparison</td><td align="left" valign="top">Success</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Wu et al [<xref ref-type="bibr" rid="ref53">53</xref>]</td><td align="left" valign="top">Wrist and smartphone</td><td align="left" valign="top">EDA, PPG<sup><xref ref-type="table-fn" rid="table3fn18">r</xref></sup>, TEMP, and ACC</td><td align="left" valign="top">Proposed framework and SVM<sup><xref ref-type="table-fn" rid="table3fn19">s</xref></sup></td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">Framework proposed</td><td align="left" valign="top">5-fold CV</td></tr><tr><td align="left" valign="top">Jelsma et al [<xref ref-type="bibr" rid="ref54">54</xref>]</td><td align="left" valign="top">Wrist-worn EDA sensor, 
Empatica E4, and smartphone</td><td align="left" valign="top">EDA</td><td align="left" valign="top">Econometric fixed-effects with robust SE regression approach</td><td align="left" valign="top">Statistical analyses</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Lai et al [<xref ref-type="bibr" rid="ref55">55</xref>]</td><td align="left" valign="top">Wearable body sensor network</td><td align="left" valign="top">TEMP and EDA</td><td align="left" valign="top">Proposed framework with Res-TCN<sup><xref ref-type="table-fn" rid="table3fn20">t</xref></sup> classifier</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">High accuracy</td><td align="left" valign="top">LOOCV<sup><xref ref-type="table-fn" rid="table3fn21">u</xref></sup></td></tr><tr><td align="left" valign="top">Li and Sano [<xref ref-type="bibr" rid="ref57">57</xref>]</td><td align="left" valign="top">Wrist</td><td align="left" valign="top">TEMP, SC, and ACC</td><td align="left" valign="top">MTL<sup><xref ref-type="table-fn" rid="table3fn22">v</xref></sup> linear regression model and k-means clustering for the proposed framework</td><td align="left" valign="top">MSE<sup><xref ref-type="table-fn" rid="table3fn23">w</xref></sup> and MAE</td><td align="left" valign="top">The framework can extract features better than feature crafting or static autoencoders, and temporal features demonstrated significantly higher precision than static and crafted features.</td><td align="left" valign="top">4-fold CV</td></tr><tr><td align="left" valign="top">Gil-Martin et al [<xref ref-type="bibr" rid="ref59">59</xref>]</td><td align="left" valign="top">RespiBAN and Empatica E4</td><td align="left" valign="top">ACC, TEMP, RESP, ECG, EMG, EDA, and BVP</td><td align="left" valign="top">CNN</td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub></td><td align="left" valign="top">&#x2014;</td><td 
align="left" valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Han et al [<xref ref-type="bibr" rid="ref60">60</xref>]</td><td align="left" valign="top">Wrist</td><td align="left" valign="top">EDA, TEMP, ACC, HR, and blood oxygenation</td><td align="left" valign="top">Adversarial networks and transfer learning</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">Disentangled adversarial transfer learning framework</td><td align="left" valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Momeni et al [<xref ref-type="bibr" rid="ref62">62</xref>]</td><td align="left" valign="top">Biopac system</td><td align="left" valign="top">ECG, RESP, PPG, and EDA</td><td align="left" valign="top">XGBoost<sup><xref ref-type="table-fn" rid="table3fn24">x</xref></sup> algorithm</td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">Group Shuffle Split CV with 10 iterations.</td></tr><tr><td align="left" valign="top">Rashid et al [<xref ref-type="bibr" rid="ref63">63</xref>]</td><td align="left" valign="top">Wrist-based PPG sensor</td><td align="left" valign="top">BVP</td><td align="left" valign="top">CNN</td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub></td><td align="left" valign="top">Success</td><td align="left" valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Yannam et al [<xref ref-type="bibr" rid="ref64">64</xref>]</td><td align="left" valign="top">Smartphones (Android) and fitness trackers (eg, OnePlus Band)</td><td align="left" valign="top">User screen time, devices around user, mobile and application usage stats, mobile interaction, location data, HR, sleep data, and step counts</td><td align="left" valign="top">Proposed framework</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" 
valign="top">Pakhomov et al [<xref ref-type="bibr" rid="ref65">65</xref>]</td><td align="left" valign="top">Fitbit</td><td align="left" valign="top">HR and activity</td><td align="left" valign="top"><italic>t</italic> test, significance levels, and Spearman rank test</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Holder et al [<xref ref-type="bibr" rid="ref66">66</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">ACC, BVP, EDA, and TEMP</td><td align="left" valign="top">KNN<sup><xref ref-type="table-fn" rid="table3fn25">y</xref></sup>, DT<sup><xref ref-type="table-fn" rid="table3fn26">z</xref></sup>, and CNN</td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub></td><td align="left" valign="top">Single modality showed promise</td><td align="left" valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Heo et al [<xref ref-type="bibr" rid="ref68">68</xref>]</td><td align="left" valign="top">PPG sensor</td><td align="left" valign="top">HR</td><td align="left" valign="top">DT, RF<sup><xref ref-type="table-fn" rid="table3fn27">aa</xref></sup>, Ada-boosting<sup><xref ref-type="table-fn" rid="table3fn28">ab</xref></sup>, 9-NN<sup><xref ref-type="table-fn" rid="table3fn29">ac</xref></sup>, LDA<sup><xref ref-type="table-fn" rid="table3fn30">ad</xref></sup>, SVM, gradient-boosting, and the proposed framework OMDP<sup><xref ref-type="table-fn" rid="table3fn31">ae</xref></sup></td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub></td><td align="left" valign="top">OMDP</td><td align="left" valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Kar et al [<xref ref-type="bibr" rid="ref69">69</xref>]</td><td align="left" valign="top">Wrist and chest</td><td align="left" valign="top">ACC, EDA, and TEMP</td><td align="left" valign="top">Binary classifier based on 
GRU<sup><xref ref-type="table-fn" rid="table3fn32">af</xref></sup> and RNN<sup><xref ref-type="table-fn" rid="table3fn33">ag</xref></sup></td><td align="left" valign="top">Precision, recall, <italic>F</italic><sub>1</sub>, and accuracy</td><td align="left" valign="top">Support the use of a modest set of signals that are easily collected on wearables.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Samyoun et al [<xref ref-type="bibr" rid="ref71">71</xref>]</td><td align="left" valign="top">Smart wrist devices</td><td align="left" valign="top">ECG, EDA, EMG, TEMP, and RESP</td><td align="left" valign="top">RF, Extra Trees (EXT), DT, LDA, LR<sup><xref ref-type="table-fn" rid="table3fn34">ah</xref></sup>, and MLP<sup><xref ref-type="table-fn" rid="table3fn35">ai</xref></sup></td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub></td><td align="left" valign="top">Chest better than wrist sensors, and a combination of both is better than just chest.</td><td align="left" valign="top">LOOCV</td></tr><tr><td align="left" valign="top">Vidal Bustamante et al [<xref ref-type="bibr" rid="ref32">32</xref>]</td><td align="left" valign="top">Wearables, wristband actigraphy data, and smartphone-based self-report surveys.</td><td align="left" valign="top">Self-report surveys on physical health, daily consumption habits, positive and negative affect, studying behaviors, stress levels and sources, sociability and support, and actigraphy</td><td align="left" valign="top">Linear modeling and clustering</td><td align="left" valign="top">BIC<sup><xref ref-type="table-fn" rid="table3fn36">aj</xref></sup></td><td align="left" valign="top">&#x2014;</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Wu et al [<xref ref-type="bibr" rid="ref74">74</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">EDA, BVP, and HR</td><td align="left" valign="top">K-means model with 2 
clusters</td><td align="left" valign="top">Silhouette score</td><td align="left" valign="top">Comparable to state-of-the-art unsupervised methods.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Tutunji et al [<xref ref-type="bibr" rid="ref76">76</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">HR, SC, ST<sup><xref ref-type="table-fn" rid="table3fn37">ak</xref></sup>, ACC, and surveys</td><td align="left" valign="top">Linear mixed-effects models, paired sample <italic>t</italic> test, and RF</td><td align="left" valign="top">Error rate</td><td align="left" valign="top">Individualized models combined EMA<sup><xref ref-type="table-fn" rid="table3fn38">al</xref></sup> with physiology performed best, while group-based models performed worse.</td><td align="left" valign="top">LOSO<sup><xref ref-type="table-fn" rid="table3fn39">am</xref></sup> and LOBO<sup><xref ref-type="table-fn" rid="table3fn40">an</xref></sup></td></tr><tr><td align="left" valign="top">Abdul Kader et al [<xref ref-type="bibr" rid="ref78">78</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">ACC, BVP, TEMP, EDA, HR, and HRV<sup><xref ref-type="table-fn" rid="table3fn41">ao</xref></sup></td><td align="left" valign="top">DNN<sup><xref ref-type="table-fn" rid="table3fn42">ap</xref></sup></td><td align="left" valign="top">Accuracy, precision, recall, <italic>F</italic><sub>1</sub>-score, and AUROC<sup><xref ref-type="table-fn" rid="table3fn43">aq</xref></sup></td><td align="left" valign="top">Privacy-preserving stress detection system using federated learning, providing privacy to the patient&#x2019;s data.</td><td align="left" valign="top">CV</td></tr><tr><td align="left" valign="top">Vos et al [<xref ref-type="bibr" rid="ref29">29</xref>]</td><td align="left" valign="top">Empatica E4, Mobi, and RespiBAN</td><td align="left" valign="top">EDA, HRV, ECG, ACC, EDA, ST, HR, SPO2<sup><xref 
ref-type="table-fn" rid="table3fn44">ar</xref></sup>, ACC, BVP, IBI<sup><xref ref-type="table-fn" rid="table3fn45">as</xref></sup>, EMG, and RESP</td><td align="left" valign="top">RF, SVM, ANN, and XGBoost</td><td align="left" valign="top">Accuracy, precision, recall, and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">An ensemble ML<sup><xref ref-type="table-fn" rid="table3fn46">at</xref></sup> model trained on a synthesized multidataset to improve the generalization of prediction.</td><td align="left" valign="top">LOSO</td></tr><tr><td align="left" valign="top">Darwish et al [<xref ref-type="bibr" rid="ref82">82</xref>]</td><td align="left" valign="top">Fitbit Sense 2, Flowtime, Movesense, Prana, and Sentio Solutions Feel Terapeutics</td><td align="left" valign="top">ECG, EDA, and RESP</td><td align="left" valign="top">RF, XGBoost, KNN, LR, DT, AdaBoost, Extra Trees, Bagging classifier, LDA, and QDA<sup><xref ref-type="table-fn" rid="table3fn47">au</xref></sup></td><td align="left" valign="top">Accuracy, precision, recall, and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">Validated multimodal wearable data in controlled (WESAD)<sup><xref ref-type="table-fn" rid="table3fn48">av</xref></sup> and real-life (SWEET)<sup><xref ref-type="table-fn" rid="table3fn49">aw</xref></sup> datasets for binary and 5-class stress detection.</td><td align="left" valign="top">CV</td></tr><tr><td align="left" valign="top">Bloomfield et al [<xref ref-type="bibr" rid="ref3">3</xref>]</td><td align="left" valign="top">Oura Ring</td><td align="left" valign="top">Sleep, surveys, ACC, HR, HRV, and RESP</td><td align="left" valign="top">Mixed-effects regression models</td><td align="left" valign="top">Coefficient and <italic>P</italic> value</td><td align="left" valign="top">Used sleep estimates from wearables in the prediction of perceived stress.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Nazeer et 
al [<xref ref-type="bibr" rid="ref84">84</xref>]</td><td align="left" valign="top">Customized proposed STRESS-CARE and stress detection sensor</td><td align="left" valign="top">ECG, EDA, BVP, EMG, TEMP, and sweat</td><td align="left" valign="top">XGBoost, DT, RF, and SVM</td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">Wrist-worn sensors (2-class and 3-class) prediction model performed worse than chest sensors (2-class).</td><td align="left" valign="top">Exploring various combinations of input sensor data.</td></tr><tr><td align="left" valign="top">Xuanzhi et al [<xref ref-type="bibr" rid="ref88">88</xref>]</td><td align="left" valign="top">Empatica E4 and RespiBAN</td><td align="left" valign="top">EDA and HRV</td><td align="left" valign="top">Attention mechanism-based XLNet model, BrainNet, Xception, EfficientNetB4, VGG19, ResNet-50, MobileNet, and InceptionV3</td><td align="left" valign="top">Accuracy, recall, precision, and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">Proposed attention mechanism-based XLNet model for continuous stress monitoring.</td><td align="left" valign="top">Train/test split and CV</td></tr><tr><td align="left" valign="top">Vidal et al [<xref ref-type="bibr" rid="ref89">89</xref>]</td><td align="left" valign="top">Actigraphy</td><td align="left" valign="top">Sleep duration and self-reports on stress and sleep</td><td align="left" valign="top">Individual-level linear model with a Bayesian framework</td><td align="left" valign="top">Bayesian metrics (pd, UIs, ROPE, ESS, and R-hat)</td><td align="left" valign="top">Negative associations between sleep duration and perceived stress in participants.</td><td align="left" valign="top">Stable estimates of lead-lag associations.</td></tr><tr><td align="left" valign="top">Tazarv et al [<xref ref-type="bibr" rid="ref91">91</xref>]</td><td align="left" valign="top">Samsung Galaxy Gear Sport</td><td 
align="left" valign="top">PPG, ACC, GYR<sup><xref ref-type="table-fn" rid="table3fn50">ax</xref></sup>, and atmospheric pressure</td><td align="left" valign="top">SVM, XGBoost, and RF with a context-aware Deep Q-Network (DQN)</td><td align="left" valign="top">Recall</td><td align="left" valign="top">A model with a context-aware active learning strategy for fine-grained, personalized stress detection worked with fewer queries.</td><td align="left" valign="top">LOSO</td></tr><tr><td align="left" valign="top">Ganesan et al [<xref ref-type="bibr" rid="ref97">97</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">ACC, PPG, ECG, EMG, EDA, RESP, and TEMP</td><td align="left" valign="top">DNN and 1D-CNN</td><td align="left" valign="top">ROC-AUC<sup><xref ref-type="table-fn" rid="table3fn51">ay</xref></sup>, <italic>F</italic><sub>1</sub>-score, accuracy, latency, and memory</td><td align="left" valign="top">An optimized, cost-effective, real-time, and energy-efficient DNN model demonstrated superior performance.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Neigel et al [<xref ref-type="bibr" rid="ref99">99</xref>]</td><td align="left" valign="top">Oura Ring</td><td align="left" valign="top">HR, HRV, activity, and sleep</td><td align="left" valign="top">Mixed effects model</td><td align="left" valign="top"><italic>P</italic> value and regression coefficients</td><td align="left" valign="top">Heightened waking HR and max waking HR, alongside sleep HR, sleep HRV, activity patterns, and sleep phases, during periods coinciding with significant academic and societal events.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Pogliaghi et al [<xref ref-type="bibr" rid="ref100">100</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">EDA and BVP</td><td align="left" valign="top">RF, XGBoost, and MTL</td><td align="left" 
valign="top"><italic>F</italic><sub>1</sub>-score and accuracy</td><td align="left" valign="top">The proposed MTL model improved compared to single-task models.</td><td align="left" valign="top">LOSO</td></tr><tr><td align="left" valign="top">Lopez et al [<xref ref-type="bibr" rid="ref105">105</xref>]</td><td align="left" valign="top">Fitbits</td><td align="left" valign="top">Calories burned, HR, sleep, steps, and distance</td><td align="left" valign="top">AdaBoost</td><td align="left" valign="top"><italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">Aggregation levels of 4 and 12 hours performed best with the calories and sleep modalities outperforming other modalities.</td><td align="left" valign="top">LOSO</td></tr><tr><td align="left" valign="top">Wilfred et al [<xref ref-type="bibr" rid="ref106">106</xref>]</td><td align="left" valign="top">Wyoware devices</td><td align="left" valign="top">EMG and GSR<sup><xref ref-type="table-fn" rid="table3fn52">az</xref></sup></td><td align="left" valign="top">Transfer learning model networks with CNN compared with SVM, DNN, LSTM<sup><xref ref-type="table-fn" rid="table3fn53">ba</xref></sup>, and CNN + LSTM</td><td align="left" valign="top">Accuracy, precision, recall, and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">The proposed stress detection tool, equipped with an IoT<sup><xref ref-type="table-fn" rid="table3fn54">bb</xref></sup> system and VR<sup><xref ref-type="table-fn" rid="table3fn55">bc</xref></sup>, worked best.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Gait&#x00E1;n-Padilla et al [<xref ref-type="bibr" rid="ref108">108</xref>]</td><td align="left" valign="top">customized wearable polymeric optical fiber sensor, fiber Bragg grating, and ECG sensor</td><td align="left" valign="top">Pulse and RESP</td><td align="left" valign="top">Bagged DT, KNN, DT, and SVM</td><td align="left" valign="top">Accuracy, precision, recall, and 
valign="top">Wrist-worn sensors performed worse than chest-worn sensors.</td>
valign="top">Evaluated the effectiveness of data fusion methods; accuracy increased as more modalities were added, and 5 modalities had the best performance.</td>
valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Sethia et al [<xref ref-type="bibr" rid="ref121">121</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">IBI from HRV, BVP, EDA, and TEMP</td><td align="left" valign="top">GB, RF, DT, SVM, KNN, and XGBoost</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">EDA + BVP + HRV performed well with GB for 2-level and 3-level stress classification, with HRV and EDA being the most important features.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Benita et al [<xref ref-type="bibr" rid="ref123">123</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">PPG</td><td align="left" valign="top">CNN</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">Developed a stress detection system investigating CNN.</td><td align="left" valign="top">Train/test split</td></tr><tr><td align="left" valign="top">Carmisciano et al [<xref ref-type="bibr" rid="ref125">125</xref>]</td><td align="left" valign="top">Empatica E4 and RespiBAN</td><td align="left" valign="top">EDA and HR</td><td align="left" valign="top">FDA<sup><xref ref-type="table-fn" rid="table3fn60">bh</xref></sup>, RF, and LM<sup><xref ref-type="table-fn" rid="table3fn61">bi</xref></sup></td><td align="left" valign="top">Partial R-squared</td><td align="left" valign="top">FDA models generally fit better than LM and RF.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Warrier et al [<xref ref-type="bibr" rid="ref126">126</xref>]</td><td align="left" valign="top">RespiBAN</td><td align="left" valign="top">ECG, EDA, EMG, RR, TEMP, and ACC</td><td align="left" valign="top">DNN and federated learning</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">Federated learning&#x2013;based stress detection method, focused on privacy protection with high 
valign="top">Personalization performed better.</td>
align="left" valign="top">Oh et al [<xref ref-type="bibr" rid="ref134">134</xref>]</td><td align="left" valign="top">RespiBAN</td><td align="left" valign="top">ACC, ECG, EDA, EMG, TEMP, and RESP</td><td align="left" valign="top">Three CNN-based classifiers and an ensemble attention module</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">An ensemble-based stress detection model that used multimodal features and metadata to capture personalized patterns.</td><td align="left" valign="top">Train/test split</td></tr><tr><td align="left" valign="top">Thapa et al [<xref ref-type="bibr" rid="ref135">135</xref>]</td><td align="left" valign="top">Empatica E4 and RespiBAN</td><td align="left" valign="top">ECG, EDA, EMG, ACC, TEMP, and RESP</td><td align="left" valign="top">Conducted experiments using 4 state-of-the-art LLMs:<sup><xref ref-type="table-fn" rid="table3fn62">bj</xref></sup> GPT (4 and 3.5-Turbo), Llama2, BioMistralDARE, and Gemini-Pro.</td><td align="left" valign="top">Accuracy and MAE</td><td align="left" valign="top">For LLMs, parameter size did not correlate with accuracy; smaller models such as GPT-3.5-Turbo performed comparably to larger ones like GPT-4, though these models overall performed worse.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Tsiampa et al [<xref ref-type="bibr" rid="ref137">137</xref>]</td><td align="left" valign="top">Empatica E4</td><td align="left" valign="top">EDA</td><td align="left" valign="top">Statistical correlation analyses</td><td align="left" valign="top">Correlation</td><td align="left" valign="top">A relationship exists between EDA and stress levels related to social media content, with a strong correlation.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Fazeli et al [<xref ref-type="bibr" rid="ref138">138</xref>]</td><td align="left" valign="top">Garmin vivoactive 4S</td><td align="left" valign="top">HR, HRV, number of 
valign="top">Agglomerative clustering in the proposed method outperformed K-means clustering.</td>
valign="top">Performance ranged from acceptable to good, but only for the presentation stressor; even the best-performing algorithm showed only a weak relationship between the detected and observed scores.</td>
valign="top">Nuamah [<xref ref-type="bibr" rid="ref145">145</xref>]</td><td align="left" valign="top">Empatica E4 and Tobii Pro Glasses 2</td><td align="left" valign="top">Vagally mediated heart variability measures (vmHRV) and task-evoked pupillary response (TEPR)</td><td align="left" valign="top">Mixed-effects modeling</td><td align="left" valign="top">r2</td><td align="left" valign="top">vmHRV measures and TEPR are sensitive enough to quantify psychophysiological responses to recurrent task-induced stress</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Saylam and &#x0130;ncel [<xref ref-type="bibr" rid="ref19">19</xref>]</td><td align="left" valign="top">Fitbit</td><td align="left" valign="top">Step counts, active minutes, HR, and sleep metrics</td><td align="left" valign="top">RF, XGBoost, LSTM, and regression</td><td align="left" valign="top">MAE</td><td align="left" valign="top">With MTL, RF had the lowest error while looking back 7 and 15 days</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Sa-nguannarm et al [<xref ref-type="bibr" rid="ref146">146</xref>]</td><td align="left" valign="top">Empatica E4 and RespiBAN</td><td align="left" valign="top">ECG, EDA, EMG, ACC, TEMP, and RESP</td><td align="left" valign="top">Bi-LSTM</td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">The human lifelong monitoring model Bi-LSTM for stress behavior recognition performed well.</td><td align="left" valign="top">Train/test split</td></tr><tr><td align="left" valign="top">Nelson et al [<xref ref-type="bibr" rid="ref147">147</xref>]</td><td align="left" valign="top">Smartphone</td><td align="left" valign="top">PPG</td><td align="left" valign="top">Mixed-effects modeling</td><td align="left" valign="top">r2</td><td align="left" valign="top">Smartphone-based PPG significantly covaries with self-reported stress and anxiety.</td><td 
align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Dahal et al [<xref ref-type="bibr" rid="ref148">148</xref>]</td><td align="left" valign="top">RespiBAN</td><td align="left" valign="top">HRV</td><td align="left" valign="top">RF</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">Identified person-specific stress events with an accuracy higher than 99% after a global training framework.</td><td align="left" valign="top">15-fold CV</td></tr><tr><td align="left" valign="top">Jiao et al [<xref ref-type="bibr" rid="ref150">150</xref>]</td><td align="left" valign="top">PL3516 Powerlab 16/35 with TN1012/ST Pulse Transducer</td><td align="left" valign="top">PRV<sup><xref ref-type="table-fn" rid="table3fn66">bn</xref></sup></td><td align="left" valign="top">SVM model with linear and radial basis function kernel</td><td align="left" valign="top">Accuracy</td><td align="left" valign="top">Developed a pulse rate variability detection model with RFE<sup><xref ref-type="table-fn" rid="table3fn67">bo</xref></sup> feature selection.</td><td align="left" valign="top">5-fold CV</td></tr><tr><td align="left" valign="top">Belwafi et al [<xref ref-type="bibr" rid="ref23">23</xref>]</td><td align="left" valign="top">EEG<sup><xref ref-type="table-fn" rid="table3fn68">bp</xref></sup> sensor</td><td align="left" valign="top">EEG</td><td align="left" valign="top">Statistical thresholding mechanism on EEG bands</td><td align="left" valign="top">Accuracy, precision, recall, and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">Proposed statistical thresholding mechanism on EEG bands approach achieved an average accuracy of 88.89%.</td><td align="left" valign="top">&#x2014;</td></tr><tr><td align="left" valign="top">Patane et al [<xref ref-type="bibr" rid="ref152">152</xref>]</td><td align="left" valign="top">Smartphone</td><td align="left" valign="top">Phone call duration, conversation, physical activity, app usage, and 
Train/validation/test split at a 70%-10%-20% ratio.</td></tr><tr><td align="left" valign="top">Subathra et al [<xref ref-type="bibr" rid="ref153">153</xref>]</td><td align="left" valign="top">Custom-built wrist device</td><td align="left" valign="top">HR and EDA</td><td align="left" valign="top">Bi-LSTM</td><td align="left" valign="top">Accuracy and <italic>F</italic><sub>1</sub>-score</td><td align="left" valign="top">Developed a wearable band; Bi-LSTM achieved <italic>F</italic><sub>1</sub>-scores of 99.38% and 98.88% across multiple datasets.</td><td align="left" valign="top">Train/validation/test split at a 70%-10%-20% ratio.
valign="top">AUC and statistical significance</td><td align="left" valign="top">Analysis found Garmin Stress Score was associated with high- and moderate-intensity positive mood; it was not associated with states of high arousal negative mood.</td><td align="left" valign="top">Statistical association analysis</td></tr><tr><td align="left" valign="top">Rosenbach et al [<xref ref-type="bibr" rid="ref24">24</xref>]</td><td align="left" valign="top">Garmin Vivosmart 4 and Polar H10 chest strap</td><td align="left" valign="top">Garmin stress score, HRV, and HR</td><td align="left" valign="top">Linear mixed effect model</td><td align="left" valign="top">Statistical significance</td><td align="left" valign="top">Analysis found HR showed the strongest association with self&#x2010;reported stress, while the Garmin stress score demonstrated only marginal predictive value.</td><td align="left" valign="top">Statistical association analysis</td></tr></tbody></table><table-wrap-foot><fn id="table3fn1"><p><sup>a</sup>ECG: electrocardiogram.</p></fn><fn id="table3fn2"><p><sup>b</sup>HR: heart rate.</p></fn><fn id="table3fn3"><p><sup>c</sup>ANN: artificial neural network.</p></fn><fn id="table3fn4"><p><sup>d</sup>SOM: self-organizing map.</p></fn><fn id="table3fn5"><p><sup>e</sup>SOFM: self-organizing feature map.</p></fn><fn id="table3fn6"><p><sup>f</sup>Not available.</p></fn><fn id="table3fn7"><p><sup>g</sup>SC: skin conductance.</p></fn><fn id="table3fn8"><p><sup>h</sup>TEMP: temperature.</p></fn><fn id="table3fn9"><p><sup>i</sup>ACC: accelerometer.</p></fn><fn id="table3fn10"><p><sup>j</sup>MAE: mean absolute error.</p></fn><fn id="table3fn11"><p><sup>k</sup>SAM: Self-Assessment Manikin.</p></fn><fn id="table3fn12"><p><sup>l</sup>EDA: electrodermal activity.</p></fn><fn id="table3fn13"><p><sup>m</sup>EMG: electromyography.</p></fn><fn id="table3fn14"><p><sup>n</sup>RESP: response.</p></fn><fn id="table3fn15"><p><sup>o</sup>BVP: blood volume pulse.</p></fn><fn 
id="table3fn16"><p><sup>p</sup>CNN: convolutional neural network.</p></fn><fn id="table3fn17"><p><sup>q</sup>CV: cross-validation.</p></fn><fn id="table3fn18"><p><sup>r</sup>PPG: photoplethysmography. </p></fn><fn id="table3fn19"><p><sup>s</sup>SVM: support vector machine.</p></fn><fn id="table3fn20"><p><sup>t</sup>Res-TCN: residual temporal convolutional network.</p></fn><fn id="table3fn21"><p><sup>u</sup>LOOCV: leave-one-out cross-validation.</p></fn><fn id="table3fn22"><p><sup>v</sup>MTL: multitask learning.</p></fn><fn id="table3fn23"><p><sup>w</sup>MSE: mean squared error.</p></fn><fn id="table3fn24"><p><sup>x</sup>XGBoost: extreme gradient boosting.</p></fn><fn id="table3fn25"><p><sup>y</sup>KNN: k-nearest neighbor.</p></fn><fn id="table3fn26"><p><sup>z</sup>DT: decision tree.</p></fn><fn id="table3fn27"><p><sup>aa</sup>RF: random forest.</p></fn><fn id="table3fn28"><p><sup>ab</sup>Ada-boosting: adaptive boosting.</p></fn><fn id="table3fn29"><p><sup>ac</sup>NN: neural network.</p></fn><fn id="table3fn30"><p><sup>ad</sup>LDA: linear discriminant analysis.</p></fn><fn id="table3fn31"><p><sup>ae</sup>OMDP: optimized model decision process.</p></fn><fn id="table3fn32"><p><sup>af</sup>GRU: gated recurrent unit.</p></fn><fn id="table3fn33"><p><sup>ag</sup>RNN: recurrent neural network.</p></fn><fn id="table3fn34"><p><sup>ah</sup>LR: logistic regression.</p></fn><fn id="table3fn35"><p><sup>ai</sup>MLP: multilayer perceptron.</p></fn><fn id="table3fn36"><p><sup>aj</sup>BIC: Bayesian information criterion.</p></fn><fn id="table3fn37"><p><sup>ak</sup>ST: skin temperature.</p></fn><fn id="table3fn38"><p><sup>al</sup>EMA: ecological momentary assessment.</p></fn><fn id="table3fn39"><p><sup>am</sup>LOSO: leave-one-subject-out.</p></fn><fn id="table3fn40"><p><sup>an</sup>LOBO: leave-one-batch-out.</p></fn><fn id="table3fn41"><p><sup>ao</sup>HRV: heart rate variability.</p></fn><fn id="table3fn42"><p><sup>ap</sup>DNN: deep neural network. 
</p></fn><fn id="table3fn43"><p><sup>aq</sup>AUROC: area under the receiver operating characteristic curve.</p></fn><fn id="table3fn44"><p><sup>ar</sup>SPO2: peripheral capillary oxygen saturation.</p></fn><fn id="table3fn45"><p><sup>as</sup>IBI: interbeat interval.</p></fn><fn id="table3fn46"><p><sup>at</sup>ML: machine learning.</p></fn><fn id="table3fn47"><p><sup>au</sup>QDA: quadratic discriminant analysis.</p></fn><fn id="table3fn48"><p><sup>av</sup>WESAD: Wearable Stress and Affect Detection.</p></fn><fn id="table3fn49"><p><sup>aw</sup>SWEET: Stress in the Wild and Everyday Environment.</p></fn><fn id="table3fn50"><p><sup>ax</sup>GYR: gyroscope.</p></fn><fn id="table3fn51"><p><sup>ay</sup>ROC-AUC: receiver operating characteristic&#x2013;area under the curve.</p></fn><fn id="table3fn52"><p><sup>az</sup>GSR: galvanic skin response.</p></fn><fn id="table3fn53"><p><sup>ba</sup>LSTM: long short-term memory.</p></fn><fn id="table3fn54"><p><sup>bb</sup>IoT: internet of things.</p></fn><fn id="table3fn55"><p><sup>bc</sup>VR: virtual reality.</p></fn><fn id="table3fn56"><p><sup>bd</sup>SEL: stacked ensemble learning.</p></fn><fn id="table3fn57"><p><sup>be</sup>LGBoost: Light Gradient Boosting Machine.</p></fn><fn id="table3fn58"><p><sup>bf</sup>CatBoost: categorical boosting.</p></fn><fn id="table3fn59"><p><sup>bg</sup>NB: naive Bayes.</p></fn><fn id="table3fn60"><p><sup>bh</sup>FDA: functional data analysis.</p></fn><fn id="table3fn61"><p><sup>bi</sup>LM: linear model.</p></fn><fn id="table3fn62"><p><sup>bj</sup>LLM: large language model.</p></fn><fn id="table3fn63"><p><sup>bk</sup>BMR: basal metabolic rate.</p></fn><fn id="table3fn64"><p><sup>bl</sup>AB: adaptive boosting.</p></fn><fn id="table3fn65"><p><sup>bm</sup>BAG: bootstrap aggregating.</p></fn><fn id="table3fn66"><p><sup>bn</sup>PRV: pulse rate variability.</p></fn><fn id="table3fn67"><p><sup>bo</sup>RFE: recursive feature elimination.</p></fn><fn id="table3fn68"><p><sup>bp</sup>EEG: 
electroencephalogram.</p></fn><fn id="table3fn69"><p><sup>bq</sup>DASS-21: Depression Anxiety Stress Scale&#x2013;21 item.</p></fn><fn id="table3fn70"><p><sup>br</sup>RMSE: root mean squared error.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s2-9"><title>Critical Appraisal of Individual Sources of Evidence</title><p>Although critical appraisal is not required for scoping reviews, we conducted an assessment of study quality to better contextualize the strengths and limitations of the included evidence. To address the quality of each paper, we scored every paper across 4 categories on a scale from 0 to 2 as described in <xref ref-type="supplementary-material" rid="app2">Multimedia Appendix 2</xref> and shown in <xref ref-type="supplementary-material" rid="app3">Multimedia Appendix 3</xref>. Given the diverse study designs among the extracted papers, we adopted a methodology similar to that used by De Angel et al [<xref ref-type="bibr" rid="ref155">155</xref>]. This approach integrates the AXIS appraisal tool [<xref ref-type="bibr" rid="ref156">156</xref>] for cross-sectional studies with the Newcastle-Ottawa Scale [<xref ref-type="bibr" rid="ref157">157</xref>] for longitudinal studies. Papers were assessed using a 3-point scoring system: 2 points for fully meeting the criteria, 1 point for partial fulfillment, and 0 points for nonfulfillment.</p><p>Effect measures extracted from the included studies consisted of accuracy, <italic>F</italic><sub>1</sub>-score, sensitivity, specificity, precision, recall, and other performance metrics reported for stress detection. These measures were used to compare model performance across studies. For population characteristics, the mean age and corresponding SDs were extracted whenever available.</p></sec><sec id="s2-10"><title>Synthesis of Results</title><p>Due to differences in study designs, methodologies, and outcome reporting, results were synthesized descriptively. 
Key study characteristics, signals measured, algorithms used, and sensor types were organized into structured tables to enable comparison across studies. Frequencies of the most commonly measured signals, best-performing algorithms, and most-used sensors were calculated and visualized using bar plots. Missing summary statistics were extracted as reported, with no additional transformations applied. No meta-analysis, subgroup analysis, or meta-regression was conducted; instead, the synthesis focused on identifying overarching trends across the included studies. Because the focus of this review was to characterize stress detection methods used among college-aged populations, we extracted data elements that were directly relevant to the review objectives, including participant characteristics, sensor types, physiological signals, analytical methods, and model performance outcomes. Broader intervention-related data items (eg, intervention protocols, adverse event reporting, and clinical outcome metrics) did not apply to the observational and experimental studies included in this review. Therefore, the extraction approach was intentionally streamlined to ensure consistency, interpretability, and comparability across heterogeneous study designs. In addition, we developed an evidence gap map to conceptually organize and summarize the literature across study conditions, methodological enablers, analytical approaches, barriers, and outcomes, highlighting recurring patterns as well as persistent gaps, following a prior standardized method [<xref ref-type="bibr" rid="ref158">158</xref>].</p></sec><sec id="s2-11"><title>Ethical Considerations</title><p>This study is a systematic review of previously published literature and did not involve the collection of primary data from human participants. No new data were generated, and no individuals were directly recruited, observed, or intervened upon as part of this research. 
Accordingly, a formal review by an Institutional Review Board or Research Ethics Board was not sought. This determination is consistent with standard guidance that systematic reviews relying exclusively on publicly available, previously published data do not constitute human participant research requiring ethics board oversight.</p><p>All included studies were previously published in peer-reviewed journals and were assumed to have undergone appropriate ethical review by their respective authors and institutions before publication. No personally identifiable information was accessed, extracted, or reported at any stage of this review. The conduct of this review adhered to the ethical principles outlined in the World Medical Association Declaration of Helsinki and complied with applicable institutional, regional, and international standards for research integrity.</p></sec></sec><sec id="s3" sec-type="results"><title>Results</title><sec id="s3-1"><title>Selection of Sources of Evidence</title><p>Records were screened from IEEE Xplore, ACM Digital Library, Embase, and PubMed, with most records coming from technical journals. A total of 134 studies were included in the review out of the original 792 records, as illustrated in <xref ref-type="fig" rid="figure1">Figure 1</xref> and <xref ref-type="supplementary-material" rid="app4">Multimedia Appendix 4</xref>. Forty-eight records were removed after deduplication. Of the remaining records, 483 were excluded after 744 abstracts were screened for relevance. In total, 127 records were excluded after 261 full texts were screened for relevance and correct population. 
Summary characteristics of the final 134 included studies are provided in <xref ref-type="fig" rid="figure1">Figure 1</xref> and <xref ref-type="table" rid="table1">Table 1</xref>.</p><fig position="float" id="figure1"><label>Figure 1.</label><caption><p>PRISMA (Preferred Reporting Items for Systematic Reviews and Meta-Analyses) flow diagram for study selection from medical and computer science databases.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="mhealth_v14i1e64144_fig01.png"/></fig></sec><sec id="s3-2"><title>Demographic and Geographic Characteristics of Included Studies</title><p>Our population of interest included college-aged students aged 18&#x2010;24 years. In terms of sex demographics, about 72.4% (97/134) of studies specified the number of participants who were male, female, or nonbinary. Among the selected studies, 2 of 134 (1.5%) studies [<xref ref-type="bibr" rid="ref39">39</xref>,<xref ref-type="bibr" rid="ref137">137</xref>] failed to mention a sample size. Across the studies that reported sex distribution, most had a higher proportion of male participants than female, indicating a demographic imbalance that may limit the generalizability of findings. In terms of racial demographics, in 42 papers published from 2020 to 2022, about 9.5% (n=13) of papers included race distribution across their sample population, and 21 (50%) studies included other relevant health information, including preexisting conditions, mental health, and underlying illnesses. From papers published from 2020 to 2025, 26 (19.4%) studies were conducted in Europe, 14 (10.4%) studies were conducted in Asia, 2 (1.5%) studies were conducted in the Middle East, 21 (15.7%) studies were conducted in the United States, 3 (2.2%) studies were conducted in South America, and other studies did not explicitly mention where they were conducted. 
The higher number of studies conducted in Europe and the United States compared to Asia and other regions suggests regional variations in digital health adoption, research funding, and accessibility of wearable technologies. These differences may influence trends in stress detection research, highlighting the need for region-specific digital health strategies to address varying technological infrastructures, health care priorities, and user needs.</p></sec><sec id="s3-3"><title>Study Design and Data Collection Characteristics</title><p>More than half (62.8%) of the 134 studies used preexisting datasets to implement their method of stress measurement. The rest of the studies were experimental in nature and carried out &#x201C;de novo&#x201D; data collection. Seventeen studies [<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref46">46</xref>,<xref ref-type="bibr" rid="ref54">54</xref>,<xref ref-type="bibr" rid="ref57">57</xref>,<xref ref-type="bibr" rid="ref61">61</xref>,<xref ref-type="bibr" rid="ref65">65</xref>,<xref ref-type="bibr" rid="ref72">72</xref>,<xref ref-type="bibr" rid="ref73">73</xref>] were longitudinal in nature, published from 2020 to 2022, and 10 studies [<xref ref-type="bibr" rid="ref3">3</xref>,<xref ref-type="bibr" rid="ref76">76</xref>,<xref ref-type="bibr" rid="ref82">82</xref>,<xref ref-type="bibr" rid="ref89">89</xref>,<xref ref-type="bibr" rid="ref91">91</xref>,<xref ref-type="bibr" rid="ref95">95</xref>,<xref ref-type="bibr" rid="ref99">99</xref>,<xref ref-type="bibr" rid="ref105">105</xref>,<xref ref-type="bibr" rid="ref152">152</xref>,<xref ref-type="bibr" rid="ref154">154</xref>] were longitudinal in nature, published from 2023 to 2025, meaning data were collected for the same study population over a period rather than collected at one time point cross-sectionally. 
These longitudinal data consist of repeated observations at the individual level rather than data collected at multiple time points across different populations. Individual-level effects are confounded with cohort effects in cross-sectional studies, so being able to isolate and study the effect of time as a repeated measure is critical. Of these longitudinal studies published from 2020 to 2022, 2 were clear in addressing how they handled missing data. These studies either imputed missing values with each person&#x2019;s channel-wise mean values of the day, where days with &#x003E;25% sensor data missing were discarded [<xref ref-type="bibr" rid="ref45">45</xref>], or removed missing data [<xref ref-type="bibr" rid="ref57">57</xref>]. It is difficult to collect comprehensive, complete data from sensors longitudinally, where data are not always complete for each participant. About 6.6% (n=9) of studies included a recruitment method for participants. Two studies used volunteers, and 1 study invited participants to participate.</p></sec><sec id="s3-4"><title>Approaches in Stress Detection Research</title><p>The extracted studies were classified into 3 primary methodological categories: algorithm comparisons (shown in <xref ref-type="table" rid="table2">Table 2</xref>), the development of custom stress measurement frameworks, and statistical analyses (illustrated in <xref ref-type="table" rid="table3">Table 3</xref>). Studies focusing on algorithm comparison primarily used 2 approaches: machine learning models, such as support vector machines (SVMs), random forest (RF), k-nearest neighbors, and extreme gradient boosting (XGBoost), which used handcrafted features for stress detection, or deep learning methods, such as convolutional neural networks (CNNs), to automatically extract relevant features [<xref ref-type="bibr" rid="ref159">159</xref>]. 
Among the studies reviewed, SVM demonstrated the highest performance, with 33.3% (n=45) of papers identifying it as the best-performing algorithm, as illustrated in <xref ref-type="fig" rid="figure2">Figure 2</xref>. In comparison, 11.1% (n=15) of the studies reported CNN as the best-performing model [<xref ref-type="bibr" rid="ref50">50</xref>,<xref ref-type="bibr" rid="ref58">58</xref>,<xref ref-type="bibr" rid="ref67">67</xref>,<xref ref-type="bibr" rid="ref103">103</xref>]. One study evaluated 3 boosting algorithms&#x2014;XGBoost, Light Gradient Boosting Machine, and CatBoost&#x2014;tree-based ensemble methods that iteratively improve weak learners to enhance classification, evaluating the effectiveness of data fusion methods [<xref ref-type="bibr" rid="ref115">115</xref>].</p><fig position="float" id="figure2"><label>Figure 2.</label><caption><p>Best-performing algorithms across 36 studies comparing established methods. ANN: artificial neural network, CNN: convolutional neural network, DT: decision trees, Extra Trees: extremely randomized trees, GB: gradient boosting, KNN: k-nearest neighbor, LightGBM: light gradient boosting machine, LSTM: deep long short-term memory, NN: neural network, RF: random forest, SVM: support vector machine, XGBoost: extreme gradient boosting.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="mhealth_v14i1e64144_fig02.png"/></fig><p>One paper [<xref ref-type="bibr" rid="ref31">31</xref>] focused on comparing long short-term memory (LSTM) and a combination of LSTM and CNN. This study found LSTM alone to perform better. Two studies in <xref ref-type="table" rid="table3">Table 3</xref> that focused on a single framework supported the use of a single modality or a modest set of signals [<xref ref-type="bibr" rid="ref66">66</xref>,<xref ref-type="bibr" rid="ref69">69</xref>]. 
Studies that focused on the comparison of chest and wearable devices found chest devices to perform better [<xref ref-type="bibr" rid="ref84">84</xref>,<xref ref-type="bibr" rid="ref109">109</xref>], but chest devices in combination with wrist devices performed the best [<xref ref-type="bibr" rid="ref71">71</xref>]. Most of these studies focused on time-agnostic algorithms, as shown in <xref ref-type="table" rid="table2">Table 2</xref>. We also found studies using wrist wearables (eg, Empatica, Microsoft Smartband 2, Fitbit Charge 2, and Samsung Galaxy Gear Sport Watches) and chest-worn devices, with core physiological signals such as EDA, galvanic skin response, HR, photoplethysmography, HRV, respiration, or temperature, evaluated using k-fold cross-validation, leave-one-out cross-validation, or leave-one-subject-out evaluation, and reported performance metrics such as <italic>F</italic><sub>1</sub>-score, accuracy, precision, and recall. In the &#x201C;best&#x201D; column, classic machine learning models were most often SVM, followed by RF, while deep learning wins were fewer (occasional CNN, deep neural network, and a single LSTM). Few studies in <xref ref-type="table" rid="table2">Table 2</xref> incorporated nonphysiological or contextual signals [<xref ref-type="bibr" rid="ref61">61</xref>,<xref ref-type="bibr" rid="ref72">72</xref>,<xref ref-type="bibr" rid="ref73">73</xref>]. Recent studies examining the association between sleep and stress have leveraged data from the Oura Ring [<xref ref-type="bibr" rid="ref3">3</xref>,<xref ref-type="bibr" rid="ref99">99</xref>]. 
Two recent studies using Garmin smartwatch&#x2013;derived stress scores found significant associations with high- and moderate-intensity positive mood in 1 study [<xref ref-type="bibr" rid="ref154">154</xref>], while another reported a stronger association of HR with self&#x2010;reported stress, and the Garmin stress score demonstrated marginal predictive value [<xref ref-type="bibr" rid="ref24">24</xref>].</p><p>Studies mainly aggregated stress on a binary or 3-tier scale, meaning participants were either identified as stressed or not stressed, as opposed to being measured on a continuous scale. Here, a continuous scale captures stress fluctuation over time rather than binary or categorical labels. Sensors or tools used to measure physiological signals included various wrist, chest, and full-body sensors alongside mobile surveys. <xref ref-type="fig" rid="figure3">Figure 3</xref> details the various devices used and shows that wrist sensors, in general, were the most widely used sensor type. About 72.4% (n=97) of the studies used well-validated stress tests or tasks for their models, such as the TSST [<xref ref-type="bibr" rid="ref160">160</xref>], mental arithmetic tests, video stimuli, the Stroop color word test, startle response tests, cold-pressor tests, or public speaking, to reliably trigger stress responses while incorporating restful periods as a baseline [<xref ref-type="bibr" rid="ref22">22</xref>]. About 8.3% (n=11) of the studies used self-reported SMS text messaging surveys in their supervised machine learning models. The various physiological features and signals measured are illustrated in <xref ref-type="fig" rid="figure4">Figure 4</xref>. The most common signal was EDA, appearing in 57.5% (n=77) of studies. 
<xref ref-type="fig" rid="figure4">Figure 4</xref> shows the top signals measured per study, including instances where papers used multiple signals together.</p><fig position="float" id="figure3"><label>Figure 3.</label><caption><p>Top 10 sensors used across all 134 studies.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="mhealth_v14i1e64144_fig03.png"/></fig><fig position="float" id="figure4"><label>Figure 4.</label><caption><p>Distribution of top physiological signals used in reviewed studies, including ecological momentary assessment (EMA) as a self-report measure. Many studies used multiple signals, which are counted in the bar plot. ACC: acceleration, BVP: blood volume pulse, ECG: electrocardiography, EDA: electrodermal activity, EEG: electroencephalogram, EMG: electromyography, GSR: galvanic skin response, HR: heart rate, HRV: heart rate variability, PPG: photoplethysmography, RESP: respiration, SC: skin conductance, TEMP: temperature.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="mhealth_v14i1e64144_fig04.png"/></fig></sec><sec id="s3-5"><title>Most Commonly Used Wearable Stress and Affect Detection Datasets in Stress Detection</title><p>Of the 62.8% of studies that used some preexisting datasets, around 80% (n=67) used the Wearable Stress and Affect Detection (WESAD) dataset, for instance, including papers published from 2020 to 2022 [<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref38">38</xref>,<xref ref-type="bibr" rid="ref42">42</xref>,<xref ref-type="bibr" rid="ref43">43</xref>,<xref ref-type="bibr" rid="ref48">48</xref>,<xref ref-type="bibr" rid="ref55">55</xref>,<xref ref-type="bibr" rid="ref56">56</xref>,<xref ref-type="bibr" rid="ref58">58</xref>,<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref63">63</xref>,<xref ref-type="bibr" rid="ref66">66</xref>-<xref ref-type="bibr" 
rid="ref71">71</xref>] or a few published from 2023 to 2025 [<xref ref-type="bibr" rid="ref2">2</xref>,<xref ref-type="bibr" rid="ref28">28</xref>,<xref ref-type="bibr" rid="ref75">75</xref>,<xref ref-type="bibr" rid="ref79">79</xref>,<xref ref-type="bibr" rid="ref86">86</xref>,<xref ref-type="bibr" rid="ref94">94</xref>,<xref ref-type="bibr" rid="ref103">103</xref>,<xref ref-type="bibr" rid="ref113">113</xref>,<xref ref-type="bibr" rid="ref122">122</xref>,<xref ref-type="bibr" rid="ref141">141</xref>]. This dataset was commonly referenced in papers included in this review. This dataset is publicly available and is a widely used dataset for stress and affect detection [<xref ref-type="bibr" rid="ref161">161</xref>]. The mean age of participants is 27.5 years with an SD of 2.4 years. The sample included 3 females and 12 males for a total of 15 participants. Heavy smokers and pregnant women were excluded, and all participants were graduate students. The signals collected include physiological and motion data from chest-worn and wrist-worn devices. Measurements include blood volume pulse, ECG, EDA, electromyography, respiration, body temperature, and 3-axis acceleration. The protocol used elicits 3 emotional states: baseline, stress, and amusement, followed by a meditation phase. Benchmarks for comparison used the well-studied stress induction method, the TSST, with 0.93 accuracy and 0.91 <italic>F</italic><sub>1</sub>-score for distinguishing stress, using a linear discriminant analysis classifier on only chest-based physiological signals.</p><p>Although many papers used this same dataset, they experimented with different physiological signals as well as motion data when extracting features for modeling. Modeling and validation methods also varied. 
The algorithms with the best performance when applied to the WESAD dataset included SVM, RF, XGBoost, k-nearest neighbor, decision tree, deep neural network, self-supervised learning, artificial neural networks, large language models, and CNN. In addition to WESAD, recently published papers used other datasets, including SWELL [<xref ref-type="bibr" rid="ref29">29</xref>], AffectiveROAD [<xref ref-type="bibr" rid="ref81">81</xref>], VerBIO [<xref ref-type="bibr" rid="ref96">96</xref>], S-TEST, or DS-3 [<xref ref-type="bibr" rid="ref101">101</xref>].</p></sec><sec id="s3-6"><title>Quality Assessment of Included Studies</title><p><xref ref-type="fig" rid="figure5">Figure 5</xref> shows a breakdown of quality score assessments for all extracted papers, broken down into 4 categories. Papers were scored 0, 1, or 2 for each category. An explanation of each category&#x2019;s scoring is provided in <xref ref-type="supplementary-material" rid="app2">Multimedia Appendix 2</xref>, and the individual score breakdown by category for each paper is provided in <xref ref-type="supplementary-material" rid="app3">Multimedia Appendix 3</xref>. In general, outcomes and sample descriptions were clearly stated, with most papers having a quality score of 2. However, representativeness and justification of sample size were areas in which many papers did not perform as well. Representativeness was cited as a common issue across many papers, as samples were limited due to recruitment processes for participants or the data that were available. The samples were also limited by age due to the demographic of interest in this review. Around 27.6% (n=37) of papers failed to give sex demographic information. Most papers analyzed used experimental data from other sources or open-source, publicly accessible datasets such as the WESAD dataset, which did not justify the chosen sample size. 
From papers published from 2020 to 2022, only 2% of papers failed to give sample size information; however, sample size justification was rarely given, although the papers that did address this issue cited their voluntary recruitment process as a limitation. Almost none of the studies analyzed did a power analysis to determine sample size before running the stress studies, which is a major shortcoming. Across recent papers published from 2023 to 2025, almost all clearly defined outcomes and described their samples, but very few addressed representativeness, and only 3 papers [<xref ref-type="bibr" rid="ref24">24</xref>,<xref ref-type="bibr" rid="ref143">143</xref>,<xref ref-type="bibr" rid="ref154">154</xref>] justified their sample size, highlighting a major gap in methodological rigor.</p><fig position="float" id="figure5"><label>Figure 5.</label><caption><p>Quality of the literature in each domain. The figure shows the scoring across papers in each category from 0 to 2, with 0 indicating not fulfilled, 1 indicating partially fulfilled, and 2 indicating fulfilled.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="mhealth_v14i1e64144_fig05.png"/></fig><p>Finally, these findings point to substantial heterogeneity and a meaningful risk of bias across the included studies. The wide variation in sample sizes, inconsistent reporting of demographic characteristics, limited disclosure of health information, and strong geographic skew toward Europe and the United States contribute to structural differences that complicate direct comparison of results. This heterogeneity is further shaped by the heavy reliance on the WESAD dataset, a publicly available dataset with only 15 predominantly male participants, with a mean age of 27.5 years, which results in many studies drawing conclusions from a small and demographically narrow sample. 
Such repeated use of a single dataset increases the likelihood that reported model performance reflects the characteristics of WESAD participants rather than capturing variability among college-aged students. Accordingly, the synthesized findings should be interpreted with caution, acknowledging that both heterogeneity in study design and risk of bias in sampling and reporting may influence observed performance patterns and limit the extent to which results can be generalized. Using a relational synthesis approach, <xref ref-type="fig" rid="figure6">Figure 6</xref> presents an evidence gap map that synthesizes methodological enablers, study conditions, stress prediction approaches, barriers, and outcomes observed across the included studies. The map illustrates a research landscape shaped by publicly available datasets, standardized in-laboratory stress protocols, and widespread use of wrist-worn physiological sensors. At the same time, it highlights recurring constraints including a predominance of laboratory-based study designs, heavy reliance on publicly available datasets, and limited demographic representativeness. While many studies report strong classification performance using classical machine learning models under controlled conditions, comparatively fewer examine temporal stress dynamics, personalization, or real-world deployment.</p><fig position="float" id="figure6"><label>Figure 6.</label><caption><p>Gap map summarizing methodological enablers, study conditions, modeling approaches, barriers, and outcomes in wearable-based stress prediction studies among college students. 
ACC: acceleration, BVP: blood volume pulse, ECG: electrocardiography, EDA: electrodermal activity, EEG: electroencephalogram, EMA: ecological momentary assessment, EMG: electromyography, GSR: galvanic skin response, HR: heart rate, HRV: heart rate variability, PPG: photoplethysmography, RESP: respiration, SC: skin conductance, TEMP: temperature.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="mhealth_v14i1e64144_fig06.png"/></fig></sec></sec><sec id="s4" sec-type="discussion"><title>Discussion</title><sec id="s4-1"><title>Overview</title><p>In this scoping review, we examined how stress is measured among college-aged students using wearable technologies and machine learning methods between 2020 and 2025, to identify commonly used wearables, the most informative physiological signals, and the best-performing algorithms. Across the literature, we found that SVMs among traditional machine learning models and CNNs among deep learning models were the strongest performers for stress classification. Wrist-worn devices were the predominant sensor platform, and EDA was the most frequently measured and most informative signal. However, most studies relied on small, homogeneous samples, frequently used controlled laboratory datasets such as WESAD, and commonly used binary (stressed vs not stressed) labeling approaches, raising concerns about representativeness and ecological validity. 
Our quality assessment further revealed inconsistent demographic reporting, insufficient justification of sample sizes, limited attention to social determinants of stress, and substantial variation in how psychological stress was defined, elicited, and validated across studies.</p></sec><sec id="s4-2"><title>Modeling Approaches for Stress Prediction</title><p>Regarding stress prediction model performance, the strong performance of SVMs can be attributed to their robustness in handling high-dimensional physiological data [<xref ref-type="bibr" rid="ref33">33</xref>,<xref ref-type="bibr" rid="ref144">144</xref>], their ability to generalize well by maximizing the margin between classes, and their effectiveness in small and imbalanced datasets, which are common in stress detection studies [<xref ref-type="bibr" rid="ref162">162</xref>]. Additionally, the flexibility of SVM in using different kernel functions [<xref ref-type="bibr" rid="ref163">163</xref>] allows them to model complex, nonlinear relationships in physiological signals without requiring deep feature extraction. These advantages likely contribute to their superior performance compared with other traditional machine learning models in stress classification. However, SVMs are computationally expensive and may not be practical for real-time applications [<xref ref-type="bibr" rid="ref164">164</xref>]. More efficient and scalable approaches are needed to enhance practicality in the field. Deep learning models, particularly CNNs, outperformed traditional machine learning approaches in comparative analyses [<xref ref-type="bibr" rid="ref82">82</xref>,<xref ref-type="bibr" rid="ref85">85</xref>]. 
Although CNNs capture spatial patterns in temporal data, they do not have memory in their architecture, reducing their effectiveness on longitudinal temporal data [<xref ref-type="bibr" rid="ref165">165</xref>], indicating a need for algorithms that explicitly model temporal patterns, such as recurrent neural networks (RNNs) [<xref ref-type="bibr" rid="ref74">74</xref>]. One study focusing on the comparison of various machine learning and deep learning methods attempted to use a version of an RNN in the form of an LSTM. This paper reported the greatest performance with LSTM alone, as opposed to a combination of LSTM and CNN, indicating some value in capturing and using temporal patterns. In addition, emerging evaluations of large language models for stress prediction [<xref ref-type="bibr" rid="ref135">135</xref>] did not perform well and suggest that parameter count does not consistently correlate with performance. For example, GPT-3.5-Turbo performed comparably to GPT-4 on WESAD [<xref ref-type="bibr" rid="ref109">109</xref>]. These findings indicate that identifying key biomarkers is essential for improving model efficiency [<xref ref-type="bibr" rid="ref115">115</xref>]. From 2023 to 2025, published literature emphasized personalization and multitask learning to enhance stress-prediction performance and generalizability [<xref ref-type="bibr" rid="ref70">70</xref>,<xref ref-type="bibr" rid="ref79">79</xref>,<xref ref-type="bibr" rid="ref98">98</xref>,<xref ref-type="bibr" rid="ref107">107</xref>,<xref ref-type="bibr" rid="ref112">112</xref>,<xref ref-type="bibr" rid="ref127">127</xref>]. 
In addition, 1 study explored stress detection in a virtual reality environment integrated with an Internet of Things system, demonstrating the potential of immersive technologies for stress monitoring [<xref ref-type="bibr" rid="ref85">85</xref>].</p></sec><sec id="s4-3"><title>Wearable Technologies and Physiological Signals</title><p>Wrist wearables were most commonly considered [<xref ref-type="bibr" rid="ref166">166</xref>] as they seem less encumbering than full body or chest wearables [<xref ref-type="bibr" rid="ref22">22</xref>] while attaining better measurement of physiological signals than surveys or smartphones. Other wearable sensors used across studies included chest wearables, full body sensors, or some combination of chest and wrist wearable signals. We saw that EDA was the most frequently measured signal across papers and is important in stress detection [<xref ref-type="bibr" rid="ref167">167</xref>], since it provides valuable information about a person&#x2019;s sympathetic nervous system activity, which is closely linked to emotional responses, including stress. Most papers used multiple signals in their model building, with EDA most commonly contributing to a more accurate model. For instance, building a stress detection model incorporating both HR and EDA [<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref81">81</xref>] data might allow for a more comprehensive, accurate, and context-aware assessment of stress and other emotional responses. Ensuring the reliability and reproducibility of physiological measurements is crucial for real-world stress detection [<xref ref-type="bibr" rid="ref26">26</xref>]. Variability in sensor accuracy, signal quality, and environmental factors can impact consistency [<xref ref-type="bibr" rid="ref22">22</xref>]. 
Validating models across diverse settings improves generalizability and practical applicability [<xref ref-type="bibr" rid="ref168">168</xref>].</p></sec><sec id="s4-4"><title>Conceptualizing and Measuring Psychological Stress</title><p>We saw that most studies used a binary model of stress in which an individual is identified as either stressed or not stressed. A few studies extended beyond binary classification by using multiclass stress prediction (eg, 3-class [<xref ref-type="bibr" rid="ref62">62</xref>] or 5-class [<xref ref-type="bibr" rid="ref59">59</xref>,<xref ref-type="bibr" rid="ref125">125</xref>] models), which allows a somewhat finer-grained view but still treats stress as discrete states. There is a need for a model more in line with how human stress manifests, such as a continuous scale [<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref169">169</xref>]. For example, an individual might feel mildly stressed, which is worth noting and which cannot be captured on a binary scale of stress [<xref ref-type="bibr" rid="ref150">150</xref>]. On a binary scale, mild stress may be interpreted as either diminished or heightened stress. A continuous scale for stress monitoring is valuable for capturing individual differences and for understanding the dynamic nature of stress [<xref ref-type="bibr" rid="ref150">150</xref>].</p><p>We found a lack of detailed explanations on how psychological stress was identified. Accurately distinguishing psychological stress from other physiological responses is complex, as HR alone is insufficient for stress detection [<xref ref-type="bibr" rid="ref154">154</xref>]. For example, HR alone cannot reliably indicate stress, as an elevated HR may result from various factors [<xref ref-type="bibr" rid="ref170">170</xref>], such as jogging or facing an unprepared mathematics test. 
A stress detection model based solely on HR data could misclassify natural variations in HR, such as those caused by excitement or physical activity during social events, as stress, leading to inaccurate assessments [<xref ref-type="bibr" rid="ref169">169</xref>]. One critical detail to note in studies of stress is the differentiation between physiological and mental stress. This distinction is complicated for wearable devices [<xref ref-type="bibr" rid="ref154">154</xref>]. To accommodate this, studies need to look at a participant&#x2019;s resting data while they are confirmed to be stressed, as well as their accelerometer data, if necessary, to check movement patterns, and consider these factors while detecting significant stress moments [<xref ref-type="bibr" rid="ref169">169</xref>]. One&#x2019;s activity must be noted to clearly identify psychological stress. Many studies used some well-validated stress tasks to account for this concern, but could benefit from clearer explanations of how their stress tasks accommodate this issue. These stress tasks mostly used tests such as mental arithmetic, Stroop test, public speaking, or cold-pressor tests, with participants putting their hands in ice water, to benchmark stress [<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref26">26</xref>]. By contrast, other datasets (eg, &#x201C;A Wearable Exam Stress Dataset for Predicting Cognitive Performance in Real-World Settings&#x201D; [<xref ref-type="bibr" rid="ref124">124</xref>]) inferred stress levels indirectly from examination grades, raising concerns about the accuracy of stress labeling. Studies that did not incorporate a stress task often used self-report surveys to monitor whether someone is stressed [<xref ref-type="bibr" rid="ref168">168</xref>,<xref ref-type="bibr" rid="ref171">171</xref>]. Self-report measures often face challenges with accuracy and completeness [<xref ref-type="bibr" rid="ref172">172</xref>]. 
While frequent and timely survey prompts can improve accuracy, they do not fully address issues of completeness. Additionally, repeated survey checks may increase participant burden, potentially leading to survey fatigue and lower response rates [<xref ref-type="bibr" rid="ref173">173</xref>]. There is also a need for better transparency regarding the wording of questions and the frequency of surveys to ensure consistency and minimize bias [<xref ref-type="bibr" rid="ref174">174</xref>].</p></sec><sec id="s4-5"><title>Concerns Related to Study Design and Reporting</title><p>When analyzing the quality of research, we saw a need for larger sample sizes [<xref ref-type="bibr" rid="ref175">175</xref>]. Larger sample sizes help reduce bias, provide a better representation of the target population, and lower the impact of outlier participants [<xref ref-type="bibr" rid="ref176">176</xref>]. We observed that many studies relied on the WESAD dataset [<xref ref-type="bibr" rid="ref177">177</xref>], a widely used dataset for stress and affect detection. However, WESAD includes only 15 participants, making it a limited representation of the college student population. Additionally, since WESAD data were collected in a controlled laboratory setting, they do not reflect real-world (&#x201C;in the wild&#x201D;) stress detection, where external factors and daily life variability play a significant role [<xref ref-type="bibr" rid="ref171">171</xref>,<xref ref-type="bibr" rid="ref178">178</xref>,<xref ref-type="bibr" rid="ref179">179</xref>]. In fact, 1 study that used WESAD achieved strong performance under laboratory conditions but failed to generalize effectively in real-world settings [<xref ref-type="bibr" rid="ref119">119</xref>], further underscoring the limitations of laboratory-based datasets.</p><p>Many studies did not report racial or ethnic demographics or have a representative sample regarding sex. 
This was a commonly identified issue within papers, as many samples relied on volunteers. Many papers also failed to report on other demographics of their samples besides sex or ethnicity, such as populations for exclusion. This includes excluding populations taking certain medications, populations with certain mental health histories, populations engaging in drug use, or pregnant populations. Knowing the populations for exclusion is crucial for replicability and transparency, as well as for bias detection and interpretation of results [<xref ref-type="bibr" rid="ref180">180</xref>-<xref ref-type="bibr" rid="ref182">182</xref>]. Although our population of interest was students, there is a need for more varied student demographics in samples regarding sex, race, and ethnicity, capturing different social determinants [<xref ref-type="bibr" rid="ref183">183</xref>]. Given that stress is influenced by various social determinants [<xref ref-type="bibr" rid="ref184">184</xref>,<xref ref-type="bibr" rid="ref185">185</xref>], future studies should incorporate factors such as socioeconomic status, neighborhood context, physical environment, racial minority representation, and health-lifestyle interactions [<xref ref-type="bibr" rid="ref186">186</xref>]. Including these elements would provide a more comprehensive understanding of stress in college students. One paper mentioned that its sample may not be representative because participants were recruited from an elite, private university [<xref ref-type="bibr" rid="ref32">32</xref>]. Along these lines, there is a need for better justification of sample selection as well as sample size. Finally, missing data present a significant challenge in stress studies, affecting both comparability across studies and the reliability of findings [<xref ref-type="bibr" rid="ref187">187</xref>]. 
The way missing data are handled, whether through imputation, exclusion, or other techniques, can influence study outcomes and lead to biased conclusions [<xref ref-type="bibr" rid="ref188">188</xref>]. There is a need for more complete data and more detailed descriptions of how missing data were handled, particularly in longitudinal studies [<xref ref-type="bibr" rid="ref189">189</xref>].</p></sec><sec id="s4-6"><title>Relationship to Prior Reviews and Contribution of This Work</title><p>Prior literature reviews have explored various aspects of stress detection using wearable technology and machine learning. A meta-analysis examined the effectiveness of wearable AI in diagnosing and predicting stress among students, while emphasizing the need for real-world validation and improvements [<xref ref-type="bibr" rid="ref190">190</xref>]. Another review categorized stress detection approaches based on different wearable sensor types and environments such as driving, studying, and working [<xref ref-type="bibr" rid="ref191">191</xref>]. A separate study systematically assessed biosignal responses to psychological stress, analyzing electroencephalogram, ECG, EDA, HRV, respiration, and temperature to evaluate their reliability and consistency [<xref ref-type="bibr" rid="ref192">192</xref>]. A prior review also examined machine learning techniques used in stress monitoring research, focusing on model generalization when training on public datasets [<xref ref-type="bibr" rid="ref20">20</xref>]. Another review focused on wearable technologies and smart devices for detecting depression, anxiety, and stress, discussing physiological markers such as HRV, EDA, and electroencephalogram, along with their market availability [<xref ref-type="bibr" rid="ref193">193</xref>]. 
Finally, a review analyzed physiological parameters such as HR, temperature, humidity, blood pressure, and speech, exploring various stress detection sensors and machine learning-based classification techniques [<xref ref-type="bibr" rid="ref194">194</xref>]. Our scoping review extends this literature by specifically focusing on stress measurement in college-aged students, reviewing recent papers published from January 2020 to December 2025, analyzing common datasets, sensor types, and the best-performing machine learning algorithms used in research. We also evaluate weaknesses in current methodologies through a quality assessment while identifying best practices in study design, feature selection, sensor use, and algorithmic approaches.</p><p>Taken together, the findings of this scoping review highlight that progress in wearable-based stress detection for college-aged students [<xref ref-type="bibr" rid="ref3">3</xref>,<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref46">46</xref>,<xref ref-type="bibr" rid="ref73">73</xref>] is constrained primarily by methodological and conceptual design choices rather than sensor availability for digital phenotyping of stress [<xref ref-type="bibr" rid="ref195">195</xref>] or algorithmic capacity [<xref ref-type="bibr" rid="ref18">18</xref>,<xref ref-type="bibr" rid="ref28">28</xref>,<xref ref-type="bibr" rid="ref30">30</xref>]. 
While multimodal physiological sensing, particularly EDA combined with cardiac measures, shows consistent promise [<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref26">26</xref>], the field remains highly reliant on small, controlled datasets such as WESAD [<xref ref-type="bibr" rid="ref177">177</xref>] and binary stress formulations that fail to capture the continuous [<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref169">169</xref>], context-dependent nature of stress in students&#x2019; daily lives [<xref ref-type="bibr" rid="ref171">171</xref>]. Advancing this area will require a shift toward larger [<xref ref-type="bibr" rid="ref175">175</xref>], more diverse cohorts that reflect different social determinants of health [<xref ref-type="bibr" rid="ref186">186</xref>], and real-world datasets that support generalizable human behavior modeling [<xref ref-type="bibr" rid="ref168">168</xref>,<xref ref-type="bibr" rid="ref196">196</xref>]; along with transparent reporting of participant characteristics, exclusion criteria, and missing data handling [<xref ref-type="bibr" rid="ref189">189</xref>]; and modeling approaches that explicitly account for temporal patterns [<xref ref-type="bibr" rid="ref95">95</xref>], personalization [<xref ref-type="bibr" rid="ref1">1</xref>], and contextual information from students&#x2019; behavioral patterns [<xref ref-type="bibr" rid="ref152">152</xref>]. These improvements are not only methodological but also ethical, and without representative samples and robust validation in real-world settings, stress detection systems might risk reinforcing bias [<xref ref-type="bibr" rid="ref197">197</xref>] and producing misleading inferences when deployed in student populations [<xref ref-type="bibr" rid="ref183">183</xref>]. 
By synthesizing recent evidence and identifying persistent gaps, this review provides a foundation for designing more reliable, interpretable, and equitable stress monitoring systems that can support just-in-time interventions and inform institutional strategies to improve student mental health [<xref ref-type="bibr" rid="ref5">5</xref>].</p></sec><sec id="s4-7"><title>Limitations</title><p>Our focused and systematic approach targeting stress in college students in recent years allows for a more detailed analysis. Recency allows for analysis of the most up-to-date and commonly used sensors as well as the newest algorithms. By systematically categorizing the approach taken by each study, along with the devices used and signals measured, we can synthesize the information, establish trends, and make conclusions about best-performing methods and practices. Many studies relied on commonly used datasets, such as WESAD. Using the same dataset across different research projects enables benchmarking, allowing for direct comparison of methodologies and an understanding of why results may vary across approaches. A common challenge in the reviewed papers was the inclusion of multiple populations or datasets within a single study. While our primary focus was on college students, some papers analyzed mixed populations or multiple datasets. However, as long as college students were included, these studies were still considered in our review. Many papers also used overlapping datasets such as the WESAD dataset, although different papers used different parts of the dataset along with different models. This may lead to some redundancy in findings. The commonly used dataset, WESAD, with only 15 participants, has a limited sample size, introducing potential bias and reducing the likelihood of capturing a truly representative population. 
Additionally, only studies published in English were included, as this was the language accessible to our reviewers, which may have led to the exclusion of relevant research.</p></sec><sec id="s4-8"><title>Conclusions</title><p>This scoping review provides a focused synthesis of wearable- and digital tool&#x2013;based stress detection research specifically among college-aged students, a population often overlooked or aggregated with broader adult samples in prior reviews. Current research highlights the need for larger and more diverse samples to improve representativeness, as many studies rely on a limited number of existing datasets, potentially leading to overlapping findings. Greater diversity in sex and ethnic demographics, along with clearer justification of sample sizes and improved demographic reporting, is essential for understanding population-level stress patterns. Methodologically, most studies conceptualized stress as a binary state (stressed vs not stressed), failing to capture variations in intensity, such as mild or moderate stress that can be chronic and clinically meaningful. Few studies used algorithms such as RNNs, which can capture temporal patterns, despite the importance of tracking stress progression over time. Greater emphasis on time-dependent modeling could enhance the understanding of how stress evolves. Many studies failed to clearly distinguish between psychological stress and physiological stress responses, despite the critical need for distinct measurement approaches. More precise definitions and methodologies are necessary to differentiate between these 2 aspects of stress effectively. 
In real-world settings, these limitations constrain the generalizability and clinical usefulness of stress detection systems.</p><p>To strengthen the credibility and generalizability of future research, studies should provide clear justifications for their sample sizes and, where possible, aim to recruit larger cohorts that reduce bias and improve statistical reliability. The field would also benefit from the development and use of more varied datasets, which can limit overlap across studies and reduce potential sources of bias. Increasing diversity in participant recruitment is essential; researchers should ensure representation across race, sex, socioeconomic status, and environmental contexts, as well as variation in behavioral and lifestyle factors such as sleep duration and efficiency, physical activity, phone usage, social media engagement, and mobility patterns. Detailed demographic reporting should accompany all studies to enhance transparency and enable meaningful comparisons across research efforts. Future analytical approaches should incorporate algorithms capable of capturing temporal patterns to model fluctuations in stress over time. Rather than relying solely on binary stress categorizations, researchers should develop models that characterize stress as a dynamic and progressive state, allowing for the detection of mild, moderate, and chronic stress levels. Clear explanations of baseline stress measurements are also needed to ensure that resting conditions are consistently defined and comparable across studies. Finally, stress prediction models should increasingly focus on personalization while maintaining robust privacy protections for participants.</p></sec></sec></body><back><ack><p>We would like to thank librarian Alissa Cilfone and Lauri Fennell for their consultation regarding database search strategies and the development of search terms. 
We used a generative artificial intelligence (AI) tool (ChatGPT-5.2; OpenAI) to polish the initial draft of the manuscript and Microsoft 365 Word built-in tools for spell and grammar checks, solely for language refinement, proofreading, summarization, and reformatting to improve the clarity and readability of the manuscript. No generative AI tools were used to generate any scientific content, figures, results, analyses, or interpretations. All citations were identified, verified, and added manually by the authors, and no AI-generated references were used.</p></ack><notes><sec><title>Funding</title><p>This study represents independent research funded by Northeastern University&#x2019;s Project-Based Exploration for the Advancement of Knowledge (PEAK) Experience #2: The Base Camp Award and Northeastern University&#x2019;s FY23 Transforming Interdisciplinary Experiential Research (Tier) 1 Seed Grant: assessing the scalability and feasibility of digitally phenotyping stress.</p></sec></notes><fn-group><fn fn-type="con"><p>AS, OBA, and JA contributed to the literature search and data extraction. AS, OBA, and JO contributed to data analysis and interpretation. All authors contributed to writing the manuscript, and all authors approved the manuscript. All authors guaranteed the integrity of the work. 
AS and OBA contributed equally to this work and are co-first authors.</p></fn><fn fn-type="conflict"><p>None declared.</p></fn></fn-group><glossary><title>Abbreviations</title><def-list><def-item><term id="abb1">CNN</term><def><p>convolutional neural network</p></def></def-item><def-item><term id="abb2">ECG</term><def><p>electrocardiogram</p></def></def-item><def-item><term id="abb3">EDA</term><def><p>electrodermal activity</p></def></def-item><def-item><term id="abb4">HR</term><def><p>heart rate</p></def></def-item><def-item><term id="abb5">HRV</term><def><p>heart rate variability</p></def></def-item><def-item><term id="abb6">LSTM</term><def><p>long short-term memory</p></def></def-item><def-item><term id="abb7">PRISMA-S</term><def><p>Preferred Reporting Items for Systematic Reviews and Meta-Analyses literature search extension</p></def></def-item><def-item><term id="abb8">PRISMA-ScR</term><def><p>Preferred Reporting Items for Systematic Reviews and Meta-Analyses Extension for Scoping Reviews</p></def></def-item><def-item><term id="abb9">RF</term><def><p>random forest</p></def></def-item><def-item><term id="abb10">SVM</term><def><p>support vector machine</p></def></def-item><def-item><term id="abb11">TSST</term><def><p>Trier Social Stress Test</p></def></def-item><def-item><term id="abb12">WESAD</term><def><p>Wearable Stress and Affect Detection</p></def></def-item><def-item><term id="abb13">XGBoost</term><def><p>extreme gradient boosting</p></def></def-item></def-list></glossary><ref-list><title>References</title><ref id="ref1"><label>1</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Hoang</surname><given-names>TH</given-names> </name><name name-style="western"><surname>Dang</surname><given-names>TK</given-names> </name><name name-style="western"><surname>Trang</surname><given-names>NTH</given-names> </name></person-group><article-title>Personalized stress detection for university students 
using wearable devices</article-title><conf-name>2025 19th International Conference on Ubiquitous Information Management and Communication (IMCOM)</conf-name><conf-date>Jan 3-5, 2025</conf-date><conf-loc>Bangkok, Thailand</conf-loc><fpage>1</fpage><lpage>7</lpage><pub-id pub-id-type="doi">10.1109/IMCOM64595.2025.10857507</pub-id></nlm-citation></ref><ref id="ref2"><label>2</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gedam</surname><given-names>S</given-names> </name><name name-style="western"><surname>Dutta</surname><given-names>S</given-names> </name><name name-style="western"><surname>Jha</surname><given-names>R</given-names> </name></person-group><article-title>Analyzing mental stress in Indian students through advanced machine learning and wearable technologies</article-title><source>Sci Rep</source><year>2025</year><month>07</month><day>1</day><volume>15</volume><issue>1</issue><fpage>20610</fpage><pub-id pub-id-type="doi">10.1038/s41598-025-06918-6</pub-id><pub-id pub-id-type="medline">40595085</pub-id></nlm-citation></ref><ref id="ref3"><label>3</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Bloomfield</surname><given-names>LSP</given-names> </name><name name-style="western"><surname>Fudolig</surname><given-names>MI</given-names> </name><name name-style="western"><surname>Kim</surname><given-names>J</given-names> </name><etal/></person-group><article-title>Predicting stress in first-year college students using sleep data from wearable devices</article-title><source>PLOS Digit Health</source><year>2024</year><month>04</month><volume>3</volume><issue>4</issue><fpage>e0000473</fpage><pub-id pub-id-type="doi">10.1371/journal.pdig.0000473</pub-id><pub-id pub-id-type="medline">38602898</pub-id></nlm-citation></ref><ref id="ref4"><label>4</label><nlm-citation citation-type="web"><article-title>Substance Abuse In 
College Students: Statistics &#x0026; Rehab Treatment</article-title><source>American Addiction Centers</source><year>2024</year><access-date>2023-06-29</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://americanaddictioncenters.org/blog/college-coping-mechanisms">https://americanaddictioncenters.org/blog/college-coping-mechanisms</ext-link></comment></nlm-citation></ref><ref id="ref5"><label>5</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Regehr</surname><given-names>C</given-names> </name><name name-style="western"><surname>Glancy</surname><given-names>D</given-names> </name><name name-style="western"><surname>Pitts</surname><given-names>A</given-names> </name></person-group><article-title>Interventions to reduce stress in university students: a review and meta-analysis</article-title><source>J Affect Disord</source><year>2013</year><month>05</month><day>15</day><volume>148</volume><issue>1</issue><fpage>1</fpage><lpage>11</lpage><pub-id pub-id-type="doi">10.1016/j.jad.2012.11.026</pub-id><pub-id pub-id-type="medline">23246209</pub-id></nlm-citation></ref><ref id="ref6"><label>6</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Schmidt</surname><given-names>MV</given-names> </name><name name-style="western"><surname>Sterlemann</surname><given-names>V</given-names> </name><name name-style="western"><surname>M&#x00FC;ller</surname><given-names>MB</given-names> </name></person-group><article-title>Chronic stress and individual vulnerability</article-title><source>Ann N Y Acad Sci</source><year>2008</year><month>12</month><volume>1148</volume><issue>1</issue><fpage>174</fpage><lpage>183</lpage><pub-id pub-id-type="doi">10.1196/annals.1410.017</pub-id><pub-id pub-id-type="medline">19120107</pub-id></nlm-citation></ref><ref id="ref7"><label>7</label><nlm-citation
citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Can</surname><given-names>YS</given-names> </name><name name-style="western"><surname>Arnrich</surname><given-names>B</given-names> </name><name name-style="western"><surname>Ersoy</surname><given-names>C</given-names> </name></person-group><article-title>Stress detection in daily life scenarios using smart phones and wearable sensors: a survey</article-title><source>J Biomed Inform</source><year>2019</year><month>04</month><volume>92</volume><fpage>103139</fpage><pub-id pub-id-type="doi">10.1016/j.jbi.2019.103139</pub-id><pub-id pub-id-type="medline">30825538</pub-id></nlm-citation></ref><ref id="ref8"><label>8</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lo Martire</surname><given-names>V</given-names> </name><name name-style="western"><surname>Caruso</surname><given-names>D</given-names> </name><name name-style="western"><surname>Palagini</surname><given-names>L</given-names> </name><name name-style="western"><surname>Zoccoli</surname><given-names>G</given-names> </name><name name-style="western"><surname>Bastianini</surname><given-names>S</given-names> </name></person-group><article-title>Stress &#x0026; sleep: a relationship lasting a lifetime</article-title><source>Neurosci Biobehav Rev</source><year>2020</year><month>10</month><volume>117</volume><fpage>65</fpage><lpage>77</lpage><pub-id pub-id-type="doi">10.1016/j.neubiorev.2019.08.024</pub-id><pub-id pub-id-type="medline">31491473</pub-id></nlm-citation></ref><ref id="ref9"><label>9</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Avitsur</surname><given-names>R</given-names> </name><name name-style="western"><surname>Powell</surname><given-names>N</given-names> </name><name name-style="western"><surname>Padgett</surname><given-names>DA</given-names> 
</name><name name-style="western"><surname>Sheridan</surname><given-names>JF</given-names> </name></person-group><article-title>Social interactions, stress, and immunity</article-title><source>Immunol Allergy Clin North Am</source><year>2009</year><month>05</month><volume>29</volume><issue>2</issue><fpage>285</fpage><lpage>293</lpage><pub-id pub-id-type="doi">10.1016/j.iac.2009.02.006</pub-id><pub-id pub-id-type="medline">19389582</pub-id></nlm-citation></ref><ref id="ref10"><label>10</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Buddhiprabha</surname><given-names>DDP</given-names> </name><name name-style="western"><surname>Shabbeer</surname><given-names>A</given-names> </name><name name-style="western"><surname>Veena</surname><given-names>N</given-names> </name><name name-style="western"><surname>Shailaja</surname><given-names>S</given-names> </name></person-group><article-title>Stress and academic performance</article-title><source>Int J Indian Psychol</source><year>2016</year><volume>3</volume><issue>3</issue><fpage>71</fpage><lpage>82</lpage><pub-id pub-id-type="doi">10.25215/0303.068</pub-id></nlm-citation></ref><ref id="ref11"><label>11</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Birch</surname><given-names>JN</given-names> </name><name name-style="western"><surname>Vanderheyden</surname><given-names>WM</given-names> </name></person-group><article-title>The molecular relationship between stress and insomnia</article-title><source>Adv Biol (Weinh)</source><year>2022</year><month>11</month><volume>6</volume><issue>11</issue><fpage>e2101203</fpage><pub-id pub-id-type="doi">10.1002/adbi.202101203</pub-id><pub-id pub-id-type="medline">35822937</pub-id></nlm-citation></ref><ref id="ref12"><label>12</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Robinson</surname><given-names>L</given-names> </name></person-group><article-title>Stress and anxiety</article-title><source>Nurs Clin North Am</source><year>1990</year><month>12</month><volume>25</volume><issue>4</issue><fpage>935</fpage><lpage>943</lpage><pub-id pub-id-type="doi">10.1016/S0029-6465(22)02991-7</pub-id><pub-id pub-id-type="medline">2235645</pub-id></nlm-citation></ref><ref id="ref13"><label>13</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Dhabhar</surname><given-names>FS</given-names> </name></person-group><article-title>Effects of stress on immune function: the good, the bad, and the beautiful</article-title><source>Immunol Res</source><year>2014</year><month>05</month><volume>58</volume><issue>2-3</issue><fpage>193</fpage><lpage>210</lpage><pub-id pub-id-type="doi">10.1007/s12026-014-8517-0</pub-id><pub-id pub-id-type="medline">24798553</pub-id></nlm-citation></ref><ref id="ref14"><label>14</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Strath</surname><given-names>SJ</given-names> </name><name name-style="western"><surname>Rowley</surname><given-names>TW</given-names> </name></person-group><article-title>Wearables for promoting physical activity</article-title><source>Clin Chem</source><year>2018</year><month>01</month><volume>64</volume><issue>1</issue><fpage>53</fpage><lpage>63</lpage><pub-id pub-id-type="doi">10.1373/clinchem.2017.272369</pub-id><pub-id pub-id-type="medline">29118062</pub-id></nlm-citation></ref><ref id="ref15"><label>15</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Spil</surname><given-names>T</given-names> </name><name name-style="western"><surname>Sunyaev</surname><given-names>A</given-names> </name><name 
name-style="western"><surname>Thiebes</surname><given-names>S</given-names> </name><name name-style="western"><surname>Van Baalen</surname><given-names>R</given-names> </name></person-group><article-title>The adoption of wearables for a healthy lifestyle: can gamification help?</article-title><year>2017</year><month>01</month><day>4</day><conf-name>50th Annual Hawaii International Conference on System Sciences (HICSS-50)</conf-name><conf-date>Jan 4, 2017</conf-date><pub-id pub-id-type="doi">10.24251/HICSS.2017.437</pub-id></nlm-citation></ref><ref id="ref16"><label>16</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Passos</surname><given-names>J</given-names> </name><name name-style="western"><surname>Lopes</surname><given-names>SI</given-names> </name><name name-style="western"><surname>Clemente</surname><given-names>FM</given-names> </name><etal/></person-group><article-title>Wearables and internet of things (IoT) technologies for fitness assessment: a systematic review</article-title><source>Sensors (Basel)</source><year>2021</year><month>08</month><day>11</day><volume>21</volume><issue>16</issue><fpage>5418</fpage><pub-id pub-id-type="doi">10.3390/s21165418</pub-id><pub-id pub-id-type="medline">34450860</pub-id></nlm-citation></ref><ref id="ref17"><label>17</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kaewkannate</surname><given-names>K</given-names> </name><name name-style="western"><surname>Kim</surname><given-names>S</given-names> </name></person-group><article-title>A comparison of wearable fitness devices</article-title><source>BMC Public Health</source><year>2016</year><month>05</month><day>24</day><volume>16</volume><issue>1</issue><fpage>433</fpage><pub-id pub-id-type="doi">10.1186/s12889-016-3059-0</pub-id><pub-id pub-id-type="medline">27220855</pub-id></nlm-citation></ref><ref 
id="ref18"><label>18</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Bobade</surname><given-names>P</given-names> </name><name name-style="western"><surname>Vani</surname><given-names>M</given-names> </name></person-group><article-title>Stress detection with machine learning and deep learning using multimodal physiological data</article-title><conf-name>2020 Second International Conference on Inventive Research in Computing Applications (ICIRCA)</conf-name><conf-date>Jul 15-17, 2020</conf-date><conf-loc>Coimbatore, India</conf-loc><fpage>51</fpage><lpage>57</lpage><pub-id pub-id-type="doi">10.1109/ICIRCA48905.2020.9183244</pub-id></nlm-citation></ref><ref id="ref19"><label>19</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Saylam</surname><given-names>B</given-names> </name><name name-style="western"><surname>&#x0130;ncel</surname><given-names>&#x00D6;D</given-names> </name></person-group><article-title>Multitask learning for mental health: depression, anxiety, stress (DAS) using wearables</article-title><source>Diagnostics (Basel)</source><year>2024</year><month>02</month><day>26</day><volume>14</volume><issue>5</issue><fpage>501</fpage><pub-id pub-id-type="doi">10.3390/diagnostics14050501</pub-id><pub-id pub-id-type="medline">38472973</pub-id></nlm-citation></ref><ref id="ref20"><label>20</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Vos</surname><given-names>G</given-names> </name><name name-style="western"><surname>Trinh</surname><given-names>K</given-names> </name><name name-style="western"><surname>Sarnyai</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Rahimi Azghadi</surname><given-names>M</given-names> </name></person-group><article-title>Generalizable machine learning for stress monitoring from 
wearable devices: a systematic literature review</article-title><source>Int J Med Inform</source><year>2023</year><month>05</month><volume>173</volume><fpage>105026</fpage><pub-id pub-id-type="doi">10.1016/j.ijmedinf.2023.105026</pub-id><pub-id pub-id-type="medline">36893657</pub-id></nlm-citation></ref><ref id="ref21"><label>21</label><nlm-citation citation-type="other"><person-group person-group-type="author"><name name-style="western"><surname>Lee</surname><given-names>H</given-names> </name><name name-style="western"><surname>Chang</surname><given-names>J</given-names> </name><name name-style="western"><surname>Jaewon</surname><given-names>K</given-names> </name><name name-style="western"><surname>Han</surname><given-names>B</given-names> </name><name name-style="western"><surname>Park</surname><given-names>SM</given-names> </name></person-group><article-title>Developing an explainable deep neural network for stress detection using biosignals and human-engineered features</article-title><source>SSRN</source><comment>Preprint posted online on Aug 5, 2024</comment><pub-id pub-id-type="doi">10.2139/ssrn.4881618</pub-id></nlm-citation></ref><ref id="ref22"><label>22</label><nlm-citation citation-type="other"><person-group person-group-type="author"><name name-style="western"><surname>Amin</surname><given-names>OB</given-names> </name><name name-style="western"><surname>Mishra</surname><given-names>V</given-names> </name><name name-style="western"><surname>Tapera</surname><given-names>TM</given-names> </name><name name-style="western"><surname>Volpe</surname><given-names>R</given-names> </name><name name-style="western"><surname>Sathyanarayana</surname><given-names>A</given-names> </name></person-group><article-title>Extending stress detection reproducibility to consumer wearable sensors</article-title><source>arXiv</source><comment>Preprint posted online on May 9, 2025</comment><pub-id pub-id-type="doi">10.48550/arXiv.2505.05694</pub-id></nlm-citation></ref><ref 
id="ref23"><label>23</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Belwafi</surname><given-names>K</given-names> </name><name name-style="western"><surname>Alsuwaidi</surname><given-names>A</given-names> </name><name name-style="western"><surname>Mejri</surname><given-names>S</given-names> </name><name name-style="western"><surname>Djemal</surname><given-names>R</given-names> </name></person-group><article-title>Brain-inspired signal processing for detecting stress during mental arithmetic tasks</article-title><source>Brain Inf</source><year>2025</year><month>12</month><volume>12</volume><issue>1</issue><fpage>34</fpage><pub-id pub-id-type="doi">10.1186/s40708-025-00281-y</pub-id></nlm-citation></ref><ref id="ref24"><label>24</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Rosenbach</surname><given-names>H</given-names> </name><name name-style="western"><surname>Itzkovitch</surname><given-names>A</given-names> </name><name name-style="western"><surname>Gidron</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Schonberg</surname><given-names>T</given-names> </name></person-group><article-title>Assessing stress level scores against wearables-driven physiological measurements</article-title><source>Stress Health</source><year>2025</year><month>12</month><volume>41</volume><issue>6</issue><fpage>e70125</fpage><pub-id pub-id-type="doi">10.1002/smi.70125</pub-id><pub-id pub-id-type="medline">41292097</pub-id></nlm-citation></ref><ref id="ref25"><label>25</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>M</given-names> </name><name name-style="western"><surname>Li</surname><given-names>J</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>Y</given-names> </name><name 
name-style="western"><surname>Hu</surname><given-names>B</given-names> </name></person-group><article-title>Stress severity detection in college students using emotional pulse signals and deep learning</article-title><source>IEEE Trans Affective Comput</source><year>2025</year><month>07</month><volume>16</volume><issue>3</issue><fpage>1942</fpage><lpage>1954</lpage><pub-id pub-id-type="doi">10.1109/TAFFC.2025.3547753</pub-id></nlm-citation></ref><ref id="ref26"><label>26</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mishra</surname><given-names>V</given-names> </name><name name-style="western"><surname>Sen</surname><given-names>S</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>G</given-names> </name><etal/></person-group><article-title>Evaluating the reproducibility of physiological stress detection models</article-title><source>Proc ACM Interact Mob Wearable Ubiquitous Technol</source><year>2020</year><month>12</month><volume>4</volume><issue>4</issue><fpage>1</fpage><lpage>29</lpage><pub-id pub-id-type="doi">10.1145/3432220</pub-id><pub-id pub-id-type="medline">36189150</pub-id></nlm-citation></ref><ref id="ref27"><label>27</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Can</surname><given-names>YS</given-names> </name><name name-style="western"><surname>Gokay</surname><given-names>D</given-names> </name><name name-style="western"><surname>K&#x0131;l&#x0131;&#x00E7;</surname><given-names>DR</given-names> </name><name name-style="western"><surname>Ekiz</surname><given-names>D</given-names> </name><name name-style="western"><surname>Chalabianloo</surname><given-names>N</given-names> </name><name name-style="western"><surname>Ersoy</surname><given-names>C</given-names> </name></person-group><article-title>How laboratory experiments can be exploited for monitoring stress in the wild: 
a bridge between laboratory and daily life</article-title><source>Sensors (Basel)</source><year>2020</year><month>02</month><day>4</day><volume>20</volume><issue>3</issue><fpage>838</fpage><pub-id pub-id-type="doi">10.3390/s20030838</pub-id><pub-id pub-id-type="medline">32033238</pub-id></nlm-citation></ref><ref id="ref28"><label>28</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Zhu</surname><given-names>L</given-names> </name><name name-style="western"><surname>Spachos</surname><given-names>P</given-names> </name><name name-style="western"><surname>Ng</surname><given-names>PC</given-names> </name><etal/></person-group><article-title>Stress detection through wrist-based electrodermal activity monitoring and machine learning</article-title><source>IEEE J Biomed Health Inform</source><year>2023</year><month>05</month><volume>27</volume><issue>5</issue><fpage>2155</fpage><lpage>2165</lpage><pub-id pub-id-type="doi">10.1109/JBHI.2023.3239305</pub-id><pub-id pub-id-type="medline">37022004</pub-id></nlm-citation></ref><ref id="ref29"><label>29</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Vos</surname><given-names>G</given-names> </name><name name-style="western"><surname>Trinh</surname><given-names>K</given-names> </name><name name-style="western"><surname>Sarnyai</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Rahimi Azghadi</surname><given-names>M</given-names> </name></person-group><article-title>Ensemble machine learning model trained on a new synthesized dataset generalizes well for stress prediction using wearable devices</article-title><source>J Biomed Inform</source><year>2023</year><month>12</month><volume>148</volume><fpage>104556</fpage><pub-id pub-id-type="doi">10.1016/j.jbi.2023.104556</pub-id></nlm-citation></ref><ref id="ref30"><label>30</label><nlm-citation 
citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>Q</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>BG</given-names> </name></person-group><article-title>Deep learning models for stress analysis in university students: a Sudoku-based study</article-title><source>Sensors (Basel)</source><year>2023</year><month>07</month><day>2</day><volume>23</volume><issue>13</issue><fpage>6099</fpage><pub-id pub-id-type="doi">10.3390/s23136099</pub-id><pub-id pub-id-type="medline">37447948</pub-id></nlm-citation></ref><ref id="ref31"><label>31</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Yu</surname><given-names>H</given-names> </name><name name-style="western"><surname>Sano</surname><given-names>A</given-names> </name></person-group><article-title>Passive sensor data based future mood, health, and stress prediction: user adaptation using deep learning</article-title><access-date>2026-03-19</access-date><conf-name>2020 42nd Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC) in conjunction with the 43rd Annual Conference of the Canadian Medical and Biological Engineering Society</conf-name><conf-date>Jul 20-24, 2020</conf-date><conf-loc>Montreal, Canada</conf-loc><fpage>5884</fpage><lpage>5887</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=9167168">https://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=9167168</ext-link></comment><pub-id pub-id-type="doi">10.1109/EMBC44109.2020.9176242</pub-id></nlm-citation></ref><ref id="ref32"><label>32</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Vidal Bustamante</surname><given-names>CM</given-names> </name><name 
name-style="western"><surname>Coombs</surname><given-names>G</given-names>  <suffix>3rd</suffix></name><name name-style="western"><surname>Rahimi-Eichi</surname><given-names>H</given-names> </name><etal/></person-group><article-title>Fluctuations in behavior and affect in college students measured using deep phenotyping</article-title><source>Sci Rep</source><year>2022</year><month>02</month><day>4</day><volume>12</volume><issue>1</issue><fpage>1932</fpage><pub-id pub-id-type="doi">10.1038/s41598-022-05331-7</pub-id><pub-id pub-id-type="medline">35121741</pub-id></nlm-citation></ref><ref id="ref33"><label>33</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Yuting</surname><given-names>L</given-names> </name><name name-style="western"><surname>Rashid</surname><given-names>RABA</given-names> </name></person-group><article-title>Beyond the books: how sleep, school belonging, and physical activity affect the mental health of students under academic stress</article-title><source>Acta Psychol (Amst)</source><year>2025</year><month>08</month><volume>258</volume><fpage>105213</fpage><pub-id pub-id-type="doi">10.1016/j.actpsy.2025.105213</pub-id><pub-id pub-id-type="medline">40609190</pub-id></nlm-citation></ref><ref id="ref34"><label>34</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Arksey</surname><given-names>H</given-names> </name><name name-style="western"><surname>O&#x2019;Malley</surname><given-names>L</given-names> </name></person-group><article-title>Scoping studies: towards a methodological framework</article-title><source>Int J Soc Res Methodol</source><year>2005</year><month>02</month><volume>8</volume><issue>1</issue><fpage>19</fpage><lpage>32</lpage><pub-id pub-id-type="doi">10.1080/1364557032000119616</pub-id></nlm-citation></ref><ref id="ref35"><label>35</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>Tricco</surname><given-names>AC</given-names> </name><name name-style="western"><surname>Lillie</surname><given-names>E</given-names> </name><name name-style="western"><surname>Zarin</surname><given-names>W</given-names> </name><etal/></person-group><article-title>PRISMA Extension for Scoping Reviews (PRISMA-ScR): checklist and explanation</article-title><source>Ann Intern Med</source><year>2018</year><month>10</month><day>2</day><volume>169</volume><issue>7</issue><fpage>467</fpage><lpage>473</lpage><pub-id pub-id-type="doi">10.7326/M18-0850</pub-id><pub-id pub-id-type="medline">30178033</pub-id></nlm-citation></ref><ref id="ref36"><label>36</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Rethlefsen</surname><given-names>ML</given-names> </name><name name-style="western"><surname>Kirtley</surname><given-names>S</given-names> </name><name name-style="western"><surname>Waffenschmidt</surname><given-names>S</given-names> </name><etal/></person-group><article-title>PRISMA-S: an extension to the PRISMA Statement for Reporting Literature Searches in Systematic Reviews</article-title><source>Syst Rev</source><year>2021</year><month>01</month><day>26</day><volume>10</volume><issue>1</issue><fpage>39</fpage><pub-id pub-id-type="doi">10.1186/s13643-020-01542-z</pub-id><pub-id pub-id-type="medline">33499930</pub-id></nlm-citation></ref><ref id="ref37"><label>37</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ouzzani</surname><given-names>M</given-names> </name><name name-style="western"><surname>Hammady</surname><given-names>H</given-names> </name><name name-style="western"><surname>Fedorowicz</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Elmagarmid</surname><given-names>A</given-names> </name></person-group><article-title>Rayyan-a 
web and mobile app for systematic reviews</article-title><source>Syst Rev</source><year>2016</year><month>12</month><day>5</day><volume>5</volume><issue>1</issue><fpage>210</fpage><pub-id pub-id-type="doi">10.1186/s13643-016-0384-4</pub-id><pub-id pub-id-type="medline">27919275</pub-id></nlm-citation></ref><ref id="ref38"><label>38</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Bellante</surname><given-names>A</given-names> </name><name name-style="western"><surname>Bergamasco</surname><given-names>L</given-names> </name><name name-style="western"><surname>Bogdanovic</surname><given-names>A</given-names> </name><etal/></person-group><article-title>EMoCy: towards physiological signals-based stress detection</article-title><conf-name>2021 IEEE EMBS International Conference on Biomedical and Health Informatics (BHI)</conf-name><conf-date>Jul 27-30, 2021</conf-date><conf-loc>Athens, Greece</conf-loc><fpage>1</fpage><lpage>4</lpage><pub-id pub-id-type="doi">10.1109/BHI50953.2021.9508611</pub-id></nlm-citation></ref><ref id="ref39"><label>39</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Faro</surname><given-names>A</given-names> </name><name name-style="western"><surname>Giordano</surname><given-names>D</given-names> </name></person-group><article-title>Prognostics and management of mental stress by AIoT monitoring and Schlegel diagrams</article-title><conf-name>2021 IEEE International Smart Cities Conference (ISC2)</conf-name><conf-date>Sep 7-10, 2021</conf-date><conf-loc>Manchester, United Kingdom</conf-loc><fpage>1</fpage><lpage>7</lpage><pub-id pub-id-type="doi">10.1109/ISC253183.2021.9562844</pub-id></nlm-citation></ref><ref id="ref40"><label>40</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Faro</surname><given-names>A</given-names> 
</name><name name-style="western"><surname>Giordano</surname><given-names>D</given-names> </name><name name-style="western"><surname>Venticinque</surname><given-names>M</given-names> </name></person-group><article-title>Finding the proper mental stress model depending on context using edge devices and machine learning</article-title><access-date>2026-03-19</access-date><conf-name>2020 IEEE International Conference on Internet of Things and Intelligence System (IoTaIS)</conf-name><conf-date>Jan 27-28, 2021</conf-date><conf-loc>Bali, Indonesia</conf-loc><fpage>161</fpage><lpage>166</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=9359628">https://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=9359628</ext-link></comment><pub-id pub-id-type="doi">10.1109/IoTaIS50849.2021.9359701</pub-id></nlm-citation></ref><ref id="ref41"><label>41</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Iranfar</surname><given-names>A</given-names> </name><name name-style="western"><surname>Arza</surname><given-names>A</given-names> </name><name name-style="western"><surname>Atienza</surname><given-names>D</given-names> </name></person-group><article-title>ReLearn: a robust machine learning framework in presence of missing data for multimodal stress detection from physiological signals</article-title><source>Annu Int Conf IEEE Eng Med Biol Soc</source><year>2021</year><month>11</month><fpage>535</fpage><lpage>541</lpage><pub-id pub-id-type="doi">10.1109/EMBC46164.2021.9630040</pub-id><pub-id pub-id-type="medline">34891350</pub-id></nlm-citation></ref><ref id="ref42"><label>42</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mohammadi</surname><given-names>A</given-names> </name><name name-style="western"><surname>Fakharzadeh</surname><given-names>M</given-names> 
</name><name name-style="western"><surname>Baraeinejad</surname><given-names>B</given-names> </name></person-group><article-title>An integrated human stress detection sensor using supervised algorithms</article-title><source>IEEE Sensors J</source><year>2022</year><volume>22</volume><issue>8</issue><fpage>8216</fpage><lpage>8223</lpage><pub-id pub-id-type="doi">10.1109/JSEN.2022.3157795</pub-id></nlm-citation></ref><ref id="ref43"><label>43</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Mustafa</surname><given-names>A</given-names> </name><name name-style="western"><surname>Alahmed</surname><given-names>M</given-names> </name><name name-style="western"><surname>Alhammadi</surname><given-names>A</given-names> </name><name name-style="western"><surname>Soudan</surname><given-names>B</given-names> </name></person-group><article-title>Stress detector system using IoT and artificial intelligence</article-title><conf-name>2020 Advances in Science and Engineering Technology International Conferences (ASET)</conf-name><conf-date>Feb 4 to Apr 9, 2020</conf-date><conf-loc>Dubai, United Arab Emirates</conf-loc><fpage>1</fpage><lpage>6</lpage><pub-id pub-id-type="doi">10.1109/ASET48392.2020.9118345</pub-id></nlm-citation></ref><ref id="ref44"><label>44</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Arsalan</surname><given-names>A</given-names> </name><name name-style="western"><surname>Majid</surname><given-names>M</given-names> </name></person-group><article-title>Human stress classification during public speaking using physiological signals</article-title><source>Comput Biol Med</source><year>2021</year><month>06</month><volume>133</volume><fpage>104377</fpage><pub-id pub-id-type="doi">10.1016/j.compbiomed.2021.104377</pub-id><pub-id pub-id-type="medline">33866254</pub-id></nlm-citation></ref><ref 
id="ref45"><label>45</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>B</given-names> </name><name name-style="western"><surname>Sano</surname><given-names>A</given-names> </name></person-group><article-title>Early versus late modality fusion of deep wearable sensor features for personalized prediction of tomorrow&#x2019;s mood, health, and stress</article-title><conf-name>2020 42nd Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC) in conjunction with the 43rd Annual Conference of the Canadian Medical and Biological Engineering Society</conf-name><conf-date>Jul 20-24, 2020</conf-date><conf-loc>Montreal, Canada</conf-loc><fpage>5896</fpage><lpage>5899</lpage><pub-id pub-id-type="doi">10.1109/EMBC44109.2020.9175463</pub-id></nlm-citation></ref><ref id="ref46"><label>46</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Cheadle</surname><given-names>JE</given-names> </name><name name-style="western"><surname>Goosby</surname><given-names>BJ</given-names> </name><name name-style="western"><surname>Jochman</surname><given-names>JC</given-names> </name><name name-style="western"><surname>Tomaso</surname><given-names>CC</given-names> </name><name name-style="western"><surname>Kozikowski Yancey</surname><given-names>CB</given-names> </name><name name-style="western"><surname>Nelson</surname><given-names>TD</given-names> </name></person-group><article-title>Race and ethnic variation in college students&#x2019; allostatic regulation of racism-related stress</article-title><source>Proc Natl Acad Sci U S A</source><year>2020</year><month>12</month><day>8</day><volume>117</volume><issue>49</issue><fpage>31053</fpage><lpage>31062</lpage><pub-id pub-id-type="doi">10.1073/pnas.1922025117</pub-id><pub-id pub-id-type="medline">33229568</pub-id></nlm-citation></ref><ref 
id="ref47"><label>47</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>M</given-names> </name><name name-style="western"><surname>Xiao</surname><given-names>W</given-names> </name><name name-style="western"><surname>Li</surname><given-names>M</given-names> </name><name name-style="western"><surname>Hao</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Hu</surname><given-names>L</given-names> </name><name name-style="western"><surname>Tao</surname><given-names>G</given-names> </name></person-group><article-title>A multi-feature and time-aware-based stress evaluation mechanism for mental status adjustment</article-title><source>ACM Trans Multimedia Comput Commun Appl</source><year>2022</year><month>02</month><day>28</day><volume>18</volume><issue>1s</issue><fpage>1</fpage><lpage>18</lpage><pub-id pub-id-type="doi">10.1145/3462763</pub-id></nlm-citation></ref><ref id="ref48"><label>48</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gupta</surname><given-names>D</given-names> </name><name name-style="western"><surname>Bhatia</surname><given-names>MPS</given-names> </name><name name-style="western"><surname>Kumar</surname><given-names>A</given-names> </name></person-group><article-title>Resolving data overload and latency issues in multivariate time-series IoMT data for mental health monitoring</article-title><source>IEEE Sensors J</source><year>2021</year><month>11</month><day>15</day><volume>21</volume><issue>22</issue><fpage>25421</fpage><lpage>25428</lpage><pub-id pub-id-type="doi">10.1109/JSEN.2021.3095853</pub-id></nlm-citation></ref><ref id="ref49"><label>49</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Panganiban</surname><given-names>FC</given-names> </name><name 
name-style="western"><surname>de Leon</surname><given-names>FA</given-names> </name></person-group><article-title>Stress detection using smartphone extracted photoplethysmography</article-title><conf-name>2021 IEEE Region 10 Symposium (TENSYMP)</conf-name><conf-date>Aug 23-25, 2021</conf-date><conf-loc>Jeju, Republic of Korea</conf-loc><fpage>1</fpage><lpage>7</lpage><pub-id pub-id-type="doi">10.1109/TENSYMP52854.2021.9550905</pub-id></nlm-citation></ref><ref id="ref50"><label>50</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Gasparini</surname><given-names>F</given-names> </name><name name-style="western"><surname>Grossi</surname><given-names>A</given-names> </name><name name-style="western"><surname>Bandini</surname><given-names>S</given-names> </name></person-group><article-title>A deep learning approach to recognize cognitive load using PPG signals</article-title><year>2021</year><month>06</month><day>29</day><access-date>2026-03-19</access-date><conf-name>PETRA &#x2019;21: Proceedings of the 14th PErvasive Technologies Related to Assistive Environments Conference</conf-name><conf-date>Jun 29, 2021</conf-date><conf-loc>Corfu, Greece</conf-loc><fpage>489</fpage><lpage>495</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3453892">https://dl.acm.org/doi/proceedings/10.1145/3453892</ext-link></comment><pub-id pub-id-type="doi">10.1145/3453892.3461625</pub-id></nlm-citation></ref><ref id="ref51"><label>51</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Azgomi</surname><given-names>HF</given-names> </name><name name-style="western"><surname>Cajigas</surname><given-names>I</given-names> </name><name name-style="western"><surname>Faghih</surname><given-names>RT</given-names> </name></person-group><article-title>Closed-loop cognitive stress regulation using fuzzy control 
in wearable-machine interface architectures</article-title><source>IEEE Access</source><year>2021</year><volume>9</volume><fpage>106202</fpage><lpage>106219</lpage><pub-id pub-id-type="doi">10.1109/ACCESS.2021.3099027</pub-id></nlm-citation></ref><ref id="ref52"><label>52</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Han</surname><given-names>HJ</given-names> </name><name name-style="western"><surname>Labbaf</surname><given-names>S</given-names> </name><name name-style="western"><surname>Borelli</surname><given-names>JL</given-names> </name><name name-style="western"><surname>Dutt</surname><given-names>N</given-names> </name><name name-style="western"><surname>Rahmani</surname><given-names>AM</given-names> </name></person-group><article-title>Objective stress monitoring based on wearable sensors in everyday settings</article-title><source>J Med Eng Technol</source><year>2020</year><month>05</month><day>18</day><volume>44</volume><issue>4</issue><fpage>177</fpage><lpage>189</lpage><pub-id pub-id-type="doi">10.1080/03091902.2020.1759707</pub-id></nlm-citation></ref><ref id="ref53"><label>53</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Wu</surname><given-names>J</given-names> </name><name name-style="western"><surname>Zhang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Zhao</surname><given-names>X</given-names> </name></person-group><article-title>Stress detection using wearable devices based on transfer learning</article-title><conf-name>2021 IEEE International Conference on Bioinformatics and Biomedicine (BIBM)</conf-name><conf-date>Dec 9-12, 2021</conf-date><conf-loc>Houston, TX</conf-loc><fpage>3122</fpage><lpage>3128</lpage><pub-id pub-id-type="doi">10.1109/BIBM52615.2021.9669904</pub-id><pub-id pub-id-type="medline">36704639</pub-id></nlm-citation></ref><ref 
id="ref54"><label>54</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Jelsma</surname><given-names>EB</given-names> </name><name name-style="western"><surname>Goosby</surname><given-names>BJ</given-names> </name><name name-style="western"><surname>Cheadle</surname><given-names>JE</given-names> </name></person-group><article-title>Do trait psychological characteristics moderate sympathetic arousal to racial discrimination exposure in a natural setting?</article-title><source>Psychophysiology</source><year>2021</year><month>04</month><volume>58</volume><issue>4</issue><fpage>e13763</fpage><pub-id pub-id-type="doi">10.1111/psyp.13763</pub-id><pub-id pub-id-type="medline">33462861</pub-id></nlm-citation></ref><ref id="ref55"><label>55</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lai</surname><given-names>K</given-names> </name><name name-style="western"><surname>Yanushkevich</surname><given-names>SN</given-names> </name><name name-style="western"><surname>Shmerko</surname><given-names>VP</given-names> </name></person-group><article-title>Intelligent stress monitoring assistant for first responders</article-title><source>IEEE Access</source><year>2021</year><volume>9</volume><fpage>25314</fpage><lpage>25329</lpage><pub-id pub-id-type="doi">10.1109/ACCESS.2021.3057578</pub-id></nlm-citation></ref><ref id="ref56"><label>56</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Liakopoulos</surname><given-names>L</given-names> </name><name name-style="western"><surname>Stagakis</surname><given-names>N</given-names> </name><name name-style="western"><surname>Zacharaki</surname><given-names>EI</given-names> </name><name name-style="western"><surname>Moustakas</surname><given-names>K</given-names> </name></person-group><article-title>CNN-based stress and emotion 
recognition in ambulatory settings</article-title><conf-name>2021 12th International Conference on Information, Intelligence, Systems &#x0026; Applications (IISA)</conf-name><conf-date>Jul 12-14, 2021</conf-date><conf-loc>Chania Crete, Greece</conf-loc><fpage>1</fpage><lpage>8</lpage><pub-id pub-id-type="doi">10.1109/IISA52424.2021.9555508</pub-id></nlm-citation></ref><ref id="ref57"><label>57</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>B</given-names> </name><name name-style="western"><surname>Sano</surname><given-names>A</given-names> </name></person-group><article-title>Extraction and interpretation of deep autoencoder-based temporal features from wearables for forecasting personalized mood, health, and stress</article-title><source>Proc ACM Interact Mob Wearable Ubiquitous Technol</source><year>2020</year><month>06</month><day>15</day><volume>4</volume><issue>2</issue><fpage>1</fpage><lpage>26</lpage><pub-id pub-id-type="doi">10.1145/3397318</pub-id></nlm-citation></ref><ref id="ref58"><label>58</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hssayeni</surname><given-names>MD</given-names> </name><name name-style="western"><surname>Ghoraani</surname><given-names>B</given-names> </name></person-group><article-title>Multi-modal physiological data fusion for affect estimation using deep learning</article-title><source>IEEE Access</source><year>2021</year><volume>9</volume><fpage>21642</fpage><lpage>21652</lpage><pub-id pub-id-type="doi">10.1109/ACCESS.2021.3055933</pub-id></nlm-citation></ref><ref id="ref59"><label>59</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gil-Martin</surname><given-names>M</given-names> </name><name name-style="western"><surname>San-Segundo</surname><given-names>R</given-names> </name><name 
name-style="western"><surname>Mateos</surname><given-names>A</given-names> </name><name name-style="western"><surname>Ferreiros-Lopez</surname><given-names>J</given-names> </name></person-group><article-title>Human stress detection with wearable sensors using convolutional neural networks</article-title><source>IEEE Aerosp Electron Syst Mag</source><year>2022</year><month>01</month><day>1</day><volume>37</volume><issue>1</issue><fpage>60</fpage><lpage>70</lpage><pub-id pub-id-type="doi">10.1109/MAES.2021.3115198</pub-id></nlm-citation></ref><ref id="ref60"><label>60</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Han</surname><given-names>M</given-names> </name><name name-style="western"><surname>Ozdenizci</surname><given-names>O</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Koike-Akino</surname><given-names>T</given-names> </name><name name-style="western"><surname>Erdogmus</surname><given-names>D</given-names> </name></person-group><article-title>Disentangled adversarial transfer learning for physiological biosignals</article-title><access-date>2026-03-19</access-date><conf-name>2020 42nd Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC) in conjunction with the 43rd Annual Conference of the Canadian Medical and Biological Engineering Society</conf-name><conf-date>Jul 20-24, 2020</conf-date><conf-loc>Montreal, Canada</conf-loc><fpage>422</fpage><lpage>425</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=9167168">https://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=9167168</ext-link></comment><pub-id pub-id-type="doi">10.1109/EMBC44109.2020.9175233</pub-id></nlm-citation></ref><ref id="ref61"><label>61</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>Mishra</surname><given-names>V</given-names> </name><name name-style="western"><surname>Pope</surname><given-names>G</given-names> </name><name name-style="western"><surname>Lord</surname><given-names>S</given-names> </name><etal/></person-group><article-title>Continuous detection of physiological stress with commodity hardware</article-title><source>ACM Trans Comput Healthcare</source><year>2020</year><month>04</month><day>30</day><volume>1</volume><issue>2</issue><fpage>1</fpage><lpage>30</lpage><pub-id pub-id-type="doi">10.1145/3361562</pub-id></nlm-citation></ref><ref id="ref62"><label>62</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Momeni</surname><given-names>N</given-names> </name><name name-style="western"><surname>Valdes</surname><given-names>AA</given-names> </name><name name-style="western"><surname>Rodrigues</surname><given-names>J</given-names> </name><name name-style="western"><surname>Sandi</surname><given-names>C</given-names> </name><name name-style="western"><surname>Atienza</surname><given-names>D</given-names> </name></person-group><article-title>CAFS: cost-aware features selection method for multimodal stress monitoring on wearable devices</article-title><source>IEEE Trans Biomed Eng</source><year>2022</year><month>03</month><volume>69</volume><issue>3</issue><fpage>1072</fpage><lpage>1084</lpage><pub-id pub-id-type="doi">10.1109/TBME.2021.3113593</pub-id><pub-id pub-id-type="medline">34543185</pub-id></nlm-citation></ref><ref id="ref63"><label>63</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Rashid</surname><given-names>N</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>L</given-names> </name><name name-style="western"><surname>Dautta</surname><given-names>M</given-names> </name><name 
name-style="western"><surname>Jimenez</surname><given-names>A</given-names> </name><name name-style="western"><surname>Tseng</surname><given-names>P</given-names> </name><name name-style="western"><surname>Al Faruque</surname><given-names>MA</given-names> </name></person-group><article-title>Feature augmented hybrid CNN for stress recognition using wrist-based photoplethysmography sensor</article-title><year>2021</year><month>08</month><day>2</day><conf-name>2021 43rd Annual International Conference of the IEEE Engineering in Medicine &#x0026; Biology Society (EMBC)</conf-name><conf-date>Nov 1-5, 2021</conf-date><pub-id pub-id-type="doi">10.1109/EMBC46164.2021.9630576</pub-id></nlm-citation></ref><ref id="ref64"><label>64</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Yannam</surname><given-names>PKR</given-names> </name><name name-style="western"><surname>Venkatesh</surname><given-names>V</given-names> </name><name name-style="western"><surname>Gupta</surname><given-names>M</given-names> </name></person-group><article-title>Research study and system design for evaluating student stress in Indian academic setting</article-title><conf-name>2022 14th International Conference on COMmunication Systems &#x0026; NETworkS (COMSNETS)</conf-name><conf-date>Jan 4-8, 2022</conf-date><conf-loc>Bangalore, India</conf-loc><fpage>54</fpage><lpage>59</lpage><pub-id pub-id-type="doi">10.1109/COMSNETS53615.2022.9668379</pub-id></nlm-citation></ref><ref id="ref65"><label>65</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Pakhomov</surname><given-names>SVS</given-names> </name><name name-style="western"><surname>Thuras</surname><given-names>PD</given-names> </name><name name-style="western"><surname>Finzel</surname><given-names>R</given-names> </name><name name-style="western"><surname>Eppel</surname><given-names>J</given-names> 
</name><name name-style="western"><surname>Kotlyar</surname><given-names>M</given-names> </name></person-group><person-group person-group-type="editor"><name name-style="western"><surname>Cabiati</surname><given-names>M</given-names> </name></person-group><article-title>Using consumer-wearable technology for remote assessment of physiological response to stress in the naturalistic environment</article-title><source>PLoS ONE</source><year>2020</year><volume>15</volume><issue>3</issue><fpage>e0229942</fpage><pub-id pub-id-type="doi">10.1371/journal.pone.0229942</pub-id><pub-id pub-id-type="medline">32210441</pub-id></nlm-citation></ref><ref id="ref66"><label>66</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Holder</surname><given-names>R</given-names> </name><name name-style="western"><surname>Sah</surname><given-names>RK</given-names> </name><name name-style="western"><surname>Cleveland</surname><given-names>M</given-names> </name><name name-style="western"><surname>Ghasemzadeh</surname><given-names>H</given-names> </name></person-group><article-title>Comparing the predictability of sensor modalities to detect stress from wearable sensor data</article-title><conf-name>2022 IEEE 19th Annual Consumer Communications &#x0026; Networking Conference (CCNC)</conf-name><conf-date>Jan 8-11, 2022</conf-date><conf-loc>Las Vegas, NV</conf-loc><fpage>557</fpage><lpage>562</lpage><pub-id pub-id-type="doi">10.1109/CCNC49033.2022.9700682</pub-id></nlm-citation></ref><ref id="ref67"><label>67</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Elzeiny</surname><given-names>S</given-names> </name><name name-style="western"><surname>Qaraqe</surname><given-names>M</given-names> </name></person-group><article-title>Automatic and intelligent stressor identification based on photoplethysmography analysis</article-title><source>IEEE 
Access</source><year>2021</year><volume>9</volume><fpage>68498</fpage><lpage>68510</lpage><pub-id pub-id-type="doi">10.1109/ACCESS.2021.3077358</pub-id></nlm-citation></ref><ref id="ref68"><label>68</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Heo</surname><given-names>S</given-names> </name><name name-style="western"><surname>Kwon</surname><given-names>S</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>J</given-names> </name></person-group><article-title>Stress detection with single PPG sensor by orchestrating multiple denoising and peak-detecting methods</article-title><source>IEEE Access</source><year>2021</year><volume>9</volume><fpage>47777</fpage><lpage>47785</lpage><pub-id pub-id-type="doi">10.1109/ACCESS.2021.3060441</pub-id></nlm-citation></ref><ref id="ref69"><label>69</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Kar</surname><given-names>SP</given-names> </name><name name-style="western"><surname>Kumar Rout</surname><given-names>N</given-names> </name><name name-style="western"><surname>Joshi</surname><given-names>J</given-names> </name></person-group><article-title>Assessment of mental stress from limited features based on GRU-RNN</article-title><conf-name>2021 IEEE 2nd International Conference on Applied Electromagnetics, Signal Processing, &#x0026; Communication (AESPC)</conf-name><conf-date>Nov 26-28, 2021</conf-date><conf-loc>Bhubaneswar, India</conf-loc><fpage>1</fpage><lpage>4</lpage><pub-id pub-id-type="doi">10.1109/AESPC52704.2021.9708506</pub-id></nlm-citation></ref><ref id="ref70"><label>70</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Prashant Bhanushali</surname><given-names>S</given-names> </name><name name-style="western"><surname>Sadasivuni</surname><given-names>S</given-names> 
</name><name name-style="western"><surname>Banerjee</surname><given-names>I</given-names> </name><name name-style="western"><surname>Sanyal</surname><given-names>A</given-names> </name></person-group><article-title>Digital machine learning circuit for real-time stress detection from wearable ECG sensor</article-title><conf-name>2020 IEEE 63rd International Midwest Symposium on Circuits and Systems (MWSCAS)</conf-name><conf-date>Aug 19-20, 2020</conf-date><conf-loc>Springfield, MA</conf-loc><fpage>978</fpage><lpage>981</lpage><pub-id pub-id-type="doi">10.1109/MWSCAS48704.2020.9184466</pub-id></nlm-citation></ref><ref id="ref71"><label>71</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Samyoun</surname><given-names>S</given-names> </name><name name-style="western"><surname>Sayeed Mondol</surname><given-names>A</given-names> </name><name name-style="western"><surname>Stankovic</surname><given-names>JA</given-names> </name></person-group><article-title>Stress detection via sensor translation</article-title><access-date>2026-03-19</access-date><conf-name>2020 16th International Conference on Distributed Computing in Sensor Systems (DCOSS)</conf-name><conf-date>May 25-27, 2020</conf-date><conf-loc>Marina del Rey, CA</conf-loc><fpage>19</fpage><lpage>26</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=9178819">https://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=9178819</ext-link></comment><pub-id pub-id-type="doi">10.1109/DCOSS49796.2020.00017</pub-id></nlm-citation></ref><ref id="ref72"><label>72</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Silva</surname><given-names>E</given-names> </name><name name-style="western"><surname>Aguiar</surname><given-names>J</given-names> </name><name 
name-style="western"><surname>Reis</surname><given-names>LP</given-names> </name><name name-style="western"><surname>S&#x00E1;</surname><given-names>JOE</given-names> </name><name name-style="western"><surname>Gon&#x00E7;alves</surname><given-names>J</given-names> </name><name name-style="western"><surname>Carvalho</surname><given-names>V</given-names> </name></person-group><article-title>Stress among Portuguese medical students: the EuStress solution</article-title><source>J Med Syst</source><year>2020</year><month>01</month><day>2</day><volume>44</volume><issue>2</issue><fpage>45</fpage><pub-id pub-id-type="doi">10.1007/s10916-019-1520-1</pub-id><pub-id pub-id-type="medline">31897774</pub-id></nlm-citation></ref><ref id="ref73"><label>73</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Islam</surname><given-names>TZ</given-names> </name><name name-style="western"><surname>Wu Liang</surname><given-names>P</given-names> </name><name name-style="western"><surname>Sweeney</surname><given-names>F</given-names> </name><etal/></person-group><article-title>College life is hard! 
- shedding light on stress prediction for autistic college students using data-driven analysis</article-title><conf-name>2021 IEEE 45th Annual Computers, Software, and Applications Conference (COMPSAC)</conf-name><conf-date>Jul 12-16, 2021</conf-date><conf-loc>Madrid, Spain</conf-loc><fpage>428</fpage><lpage>437</lpage><pub-id pub-id-type="doi">10.1109/COMPSAC51774.2021.00066</pub-id></nlm-citation></ref><ref id="ref74"><label>74</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Wu</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Daoudi</surname><given-names>M</given-names> </name><name name-style="western"><surname>Amad</surname><given-names>A</given-names> </name><name name-style="western"><surname>Sparrow</surname><given-names>L</given-names> </name><name name-style="western"><surname>D&#x2019;Hondt</surname><given-names>F</given-names> </name></person-group><article-title>Unsupervised learning method for exploring students&#x2019; mental stress in medical simulation training</article-title><year>2020</year><month>10</month><day>25</day><access-date>2026-03-19</access-date><conf-name>ICMI &#x2019;20 Companion: Companion Publication of the 2020 International Conference on Multimodal Interaction</conf-name><conf-date>Oct 25, 2020</conf-date><conf-loc>Virtual Event, The Netherlands</conf-loc><fpage>165</fpage><lpage>170</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3395035">https://dl.acm.org/doi/proceedings/10.1145/3395035</ext-link></comment><pub-id pub-id-type="doi">10.1145/3395035.3425191</pub-id></nlm-citation></ref><ref id="ref75"><label>75</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mitro</surname><given-names>N</given-names> </name><name name-style="western"><surname>Argyri</surname><given-names>K</given-names> </name><name 
name-style="western"><surname>Pavlopoulos</surname><given-names>L</given-names> </name><etal/></person-group><article-title>AI-enabled smart wristband providing real-time vital signs and stress monitoring</article-title><source>Sensors (Basel)</source><year>2023</year><month>03</month><day>4</day><volume>23</volume><issue>5</issue><fpage>2821</fpage><pub-id pub-id-type="doi">10.3390/s23052821</pub-id><pub-id pub-id-type="medline">36905025</pub-id></nlm-citation></ref><ref id="ref76"><label>76</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Tutunji</surname><given-names>R</given-names> </name><name name-style="western"><surname>Kogias</surname><given-names>N</given-names> </name><name name-style="western"><surname>Kapteijns</surname><given-names>B</given-names> </name><etal/></person-group><article-title>Detecting prolonged stress in real life using wearable biosensors and ecological momentary assessments: naturalistic experimental study</article-title><source>J Med Internet Res</source><year>2023</year><month>10</month><day>19</day><volume>25</volume><fpage>e39995</fpage><pub-id pub-id-type="doi">10.2196/39995</pub-id><pub-id pub-id-type="medline">37856180</pub-id></nlm-citation></ref><ref id="ref77"><label>77</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lange</surname><given-names>L</given-names> </name><name name-style="western"><surname>Wenzlitschke</surname><given-names>N</given-names> </name><name name-style="western"><surname>Rahm</surname><given-names>E</given-names> </name></person-group><article-title>Generating synthetic health sensor data for privacy-preserving wearable stress detection</article-title><source>Sensors (Basel)</source><year>2024</year><month>05</month><day>11</day><volume>24</volume><issue>10</issue><fpage>3052</fpage><pub-id pub-id-type="doi">10.3390/s24103052</pub-id><pub-id 
pub-id-type="medline">38793906</pub-id></nlm-citation></ref><ref id="ref78"><label>78</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Abdul Kader</surname><given-names>L</given-names> </name><name name-style="western"><surname>Al-Shargie</surname><given-names>F</given-names> </name><name name-style="western"><surname>Tariq</surname><given-names>U</given-names> </name><name name-style="western"><surname>Al-Nashash</surname><given-names>H</given-names> </name></person-group><article-title>One-channel wearable mental stress state monitoring system</article-title><source>Sensors (Basel)</source><year>2024</year><month>08</month><day>20</day><volume>24</volume><issue>16</issue><fpage>5373</fpage><pub-id pub-id-type="doi">10.3390/s24165373</pub-id><pub-id pub-id-type="medline">39205067</pub-id></nlm-citation></ref><ref id="ref79"><label>79</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Almadhor</surname><given-names>A</given-names> </name><name name-style="western"><surname>Sampedro</surname><given-names>GA</given-names> </name><name name-style="western"><surname>Abisado</surname><given-names>M</given-names> </name><etal/></person-group><article-title>Wrist-based electrodermal activity monitoring for stress detection using federated learning</article-title><source>Sensors (Basel)</source><year>2023</year><month>04</month><day>14</day><volume>23</volume><issue>8</issue><fpage>3984</fpage><pub-id pub-id-type="doi">10.3390/s23083984</pub-id><pub-id pub-id-type="medline">37112323</pub-id></nlm-citation></ref><ref id="ref80"><label>80</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mai</surname><given-names>ND</given-names> </name><name name-style="western"><surname>Chung</surname><given-names>WY</given-names> </name></person-group><article-title>On-chip 
mental stress detection: integrating a wearable behind-the-ear EEG device with embedded tiny neural network</article-title><source>IEEE J Biomed Health Inform</source><year>2025</year><month>03</month><volume>29</volume><issue>3</issue><fpage>1872</fpage><lpage>1885</lpage><pub-id pub-id-type="doi">10.1109/JBHI.2024.3519600</pub-id><pub-id pub-id-type="medline">40030726</pub-id></nlm-citation></ref><ref id="ref81"><label>81</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sepanloo</surname><given-names>K</given-names> </name><name name-style="western"><surname>Shevelev</surname><given-names>D</given-names> </name><name name-style="western"><surname>Son</surname><given-names>YJ</given-names> </name><name name-style="western"><surname>Aras</surname><given-names>S</given-names> </name><name name-style="western"><surname>Hinton</surname><given-names>JE</given-names> </name></person-group><article-title>Assessing physiological stress responses in student nurses using mixed reality training</article-title><source>Sensors (Basel)</source><year>2025</year><month>05</month><day>20</day><volume>25</volume><issue>10</issue><fpage>3222</fpage><pub-id pub-id-type="doi">10.3390/s25103222</pub-id><pub-id pub-id-type="medline">40432013</pub-id></nlm-citation></ref><ref id="ref82"><label>82</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Darwish</surname><given-names>BA</given-names> </name><name name-style="western"><surname>Rehman</surname><given-names>SU</given-names> </name><name name-style="western"><surname>Sadek</surname><given-names>I</given-names> </name><name name-style="western"><surname>Salem</surname><given-names>NM</given-names> </name><name name-style="western"><surname>Kareem</surname><given-names>G</given-names> </name><name name-style="western"><surname>Mahmoud</surname><given-names>LN</given-names> 
</name></person-group><article-title>From lab to real-life: a three-stage validation of wearable technology for stress monitoring</article-title><source>MethodsX</source><year>2025</year><month>06</month><volume>14</volume><fpage>103205</fpage><pub-id pub-id-type="doi">10.1016/j.mex.2025.103205</pub-id><pub-id pub-id-type="medline">39996105</pub-id></nlm-citation></ref><ref id="ref83"><label>83</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lim</surname><given-names>KYT</given-names> </name><name name-style="western"><surname>Nguyen Thien</surname><given-names>MT</given-names> </name><name name-style="western"><surname>Nguyen Duc</surname><given-names>MA</given-names> </name><name name-style="western"><surname>Posada-Quintero</surname><given-names>HF</given-names> </name></person-group><article-title>Application of DIY electrodermal activity wristband in detecting stress and affective responses of students</article-title><source>Bioengineering (Basel)</source><year>2024</year><month>03</month><day>20</day><volume>11</volume><issue>3</issue><fpage>291</fpage><pub-id pub-id-type="doi">10.3390/bioengineering11030291</pub-id><pub-id pub-id-type="medline">38534565</pub-id></nlm-citation></ref><ref id="ref84"><label>84</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Nazeer</surname><given-names>M</given-names> </name><name name-style="western"><surname>Salagrama</surname><given-names>S</given-names> </name><name name-style="western"><surname>Kumar</surname><given-names>P</given-names> </name><etal/></person-group><article-title>Improved method for stress detection using bio-sensor technology and machine learning algorithms</article-title><source>MethodsX</source><year>2024</year><month>06</month><volume>12</volume><fpage>102581</fpage><pub-id pub-id-type="doi">10.1016/j.mex.2024.102581</pub-id><pub-id 
pub-id-type="medline">38322136</pub-id></nlm-citation></ref><ref id="ref85"><label>85</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Almadhor</surname><given-names>A</given-names> </name><name name-style="western"><surname>Sampedro</surname><given-names>GA</given-names> </name><name name-style="western"><surname>Abisado</surname><given-names>M</given-names> </name><name name-style="western"><surname>Abbas</surname><given-names>S</given-names> </name></person-group><article-title>Efficient feature-selection-based stacking model for stress detection based on chest electrodermal activity</article-title><source>Sensors (Basel)</source><year>2023</year><month>07</month><day>25</day><volume>23</volume><issue>15</issue><fpage>6664</fpage><pub-id pub-id-type="doi">10.3390/s23156664</pub-id><pub-id pub-id-type="medline">37571448</pub-id></nlm-citation></ref><ref id="ref86"><label>86</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Str&#x017E;inar</surname><given-names>&#x017D;</given-names> </name><name name-style="western"><surname>Sanchis</surname><given-names>A</given-names> </name><name name-style="western"><surname>Ledezma</surname><given-names>A</given-names> </name><name name-style="western"><surname>Sipele</surname><given-names>O</given-names> </name><name name-style="western"><surname>Pregelj</surname><given-names>B</given-names> </name><name name-style="western"><surname>&#x0160;krjanc</surname><given-names>I</given-names> </name></person-group><article-title>Stress detection using frequency spectrum analysis of wrist-measured electrodermal activity</article-title><source>Sensors (Basel)</source><year>2023</year><month>01</month><day>14</day><volume>23</volume><issue>2</issue><fpage>963</fpage><pub-id pub-id-type="doi">10.3390/s23020963</pub-id><pub-id pub-id-type="medline">36679760</pub-id></nlm-citation></ref><ref 
id="ref87"><label>87</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Feng</surname><given-names>M</given-names> </name><name name-style="western"><surname>Fang</surname><given-names>T</given-names> </name><name name-style="western"><surname>He</surname><given-names>C</given-names> </name><name name-style="western"><surname>Li</surname><given-names>M</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>J</given-names> </name></person-group><article-title>Affect and stress detection based on feature fusion of LSTM and 1DCNN</article-title><source>Comput Methods Biomech Biomed Engin</source><year>2024</year><volume>27</volume><issue>4</issue><fpage>512</fpage><lpage>520</lpage><pub-id pub-id-type="doi">10.1080/10255842.2023.2188988</pub-id><pub-id pub-id-type="medline">36919485</pub-id></nlm-citation></ref><ref id="ref88"><label>88</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Xuanzhi</surname><given-names>L</given-names> </name><name name-style="western"><surname>Hakeem</surname><given-names>A</given-names> </name><name name-style="western"><surname>Mohaisen</surname><given-names>L</given-names> </name><etal/></person-group><article-title>BrainNet: an automated approach for brain stress prediction utilizing electrodermal activity signal with XLNet model</article-title><source>Front Comput Neurosci</source><year>2024</year><volume>18</volume><fpage>1482994</fpage><pub-id pub-id-type="doi">10.3389/fncom.2024.1482994</pub-id><pub-id pub-id-type="medline">39512386</pub-id></nlm-citation></ref><ref id="ref89"><label>89</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Vidal Bustamante</surname><given-names>CM</given-names> </name><name name-style="western"><surname>Coombs III</surname><given-names>G</given-names> 
</name><name name-style="western"><surname>Rahimi-Eichi</surname><given-names>H</given-names> </name><etal/></person-group><article-title>Precision assessment of real-world associations between stress and sleep duration using actigraphy data collected continuously for an academic year: individual-level modeling study</article-title><source>JMIR Form Res</source><year>2024</year><month>04</month><day>30</day><volume>8</volume><fpage>e53441</fpage><pub-id pub-id-type="doi">10.2196/53441</pub-id><pub-id pub-id-type="medline">38687600</pub-id></nlm-citation></ref><ref id="ref90"><label>90</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Fauzi</surname><given-names>MA</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>B</given-names> </name><name name-style="western"><surname>Yeng</surname><given-names>P</given-names> </name></person-group><article-title>Improving stress detection using weighted score-level fusion of multiple sensor</article-title><year>2022</year><month>11</month><day>22</day><access-date>2026-03-19</access-date><conf-name>SIET &#x2019;22: Proceedings of the 7th International Conference on Sustainable Information Engineering and Technology</conf-name><conf-date>Jan 13, 2023</conf-date><conf-loc>Malang, Indonesia</conf-loc><fpage>65</fpage><lpage>71</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3568231">https://dl.acm.org/doi/proceedings/10.1145/3568231</ext-link></comment><pub-id pub-id-type="doi">10.1145/3568231.3568242</pub-id></nlm-citation></ref><ref id="ref91"><label>91</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Tazarv</surname><given-names>A</given-names> </name><name name-style="western"><surname>Labbaf</surname><given-names>S</given-names> </name><name 
name-style="western"><surname>Rahmani</surname><given-names>A</given-names> </name><name name-style="western"><surname>Dutt</surname><given-names>N</given-names> </name><name name-style="western"><surname>Levorato</surname><given-names>M</given-names> </name></person-group><article-title>Active reinforcement learning for personalized stress monitoring in everyday settings</article-title><year>2023</year><month>06</month><day>21</day><access-date>2026-03-19</access-date><conf-name>CHASE &#x2019;23: Proceedings of the 8th ACM/IEEE International Conference on Connected Health: Applications, Systems and Engineering Technologies</conf-name><conf-date>Jan 22, 2024</conf-date><conf-loc>Orlando, FL</conf-loc><fpage>44</fpage><lpage>55</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3580252">https://dl.acm.org/doi/proceedings/10.1145/3580252</ext-link></comment><pub-id pub-id-type="doi">10.1145/3580252.3586979</pub-id></nlm-citation></ref><ref id="ref92"><label>92</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Alfredo</surname><given-names>RD</given-names> </name><name name-style="western"><surname>Nie</surname><given-names>L</given-names> </name><name name-style="western"><surname>Kennedy</surname><given-names>P</given-names> </name><etal/></person-group><article-title>&#x201C;That student should be a lion tamer!&#x201D; StressViz: designing a stress analytics dashboard for teachers</article-title><year>2023</year><month>03</month><day>13</day><access-date>2026-03-19</access-date><conf-name>LAK2023: LAK23: 13th International Learning Analytics and Knowledge Conference</conf-name><conf-date>Mar 13, 2023</conf-date><conf-loc>Arlington, TX</conf-loc><fpage>57</fpage><lpage>67</lpage><comment><ext-link ext-link-type="uri" 
xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3576050">https://dl.acm.org/doi/proceedings/10.1145/3576050</ext-link></comment><pub-id pub-id-type="doi">10.1145/3576050.3576058</pub-id></nlm-citation></ref><ref id="ref93"><label>93</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Su</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Ge</surname><given-names>L</given-names> </name><name name-style="western"><surname>Wei</surname><given-names>G</given-names> </name></person-group><article-title>Random forest model predicts stress level in a sample of 18,403 college students</article-title><year>2024</year><month>06</month><day>21</day><access-date>2026-03-19</access-date><conf-name>CAIBDA &#x2019;24: Proceedings of the 2024 4th International Conference on Artificial Intelligence, Big Data and Algorithms</conf-name><conf-date>Oct 24, 2024</conf-date><conf-loc>Zhengzhou, China</conf-loc><fpage>588</fpage><lpage>593</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3690407">https://dl.acm.org/doi/proceedings/10.1145/3690407</ext-link></comment><pub-id pub-id-type="doi">10.1145/3690407.3690507</pub-id></nlm-citation></ref><ref id="ref94"><label>94</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Wang</surname><given-names>L</given-names> </name><name name-style="western"><surname>Hao</surname><given-names>J</given-names> </name><name name-style="western"><surname>Zhou</surname><given-names>TH</given-names> </name><name name-style="western"><surname>Song</surname><given-names>F</given-names> </name></person-group><article-title>ECG stress detection model based on heart rate variability feature extraction</article-title><year>2023</year><month>06</month><day>17</day><access-date>2026-03-19</access-date><conf-name>HP3C &#x2019;23: 
Proceedings of the 2023 7th International Conference on High Performance Compilation, Computing and Communications</conf-name><conf-date>Nov 16, 2023</conf-date><conf-loc>Jinan, China</conf-loc><fpage>184</fpage><lpage>188</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3606043">https://dl.acm.org/doi/proceedings/10.1145/3606043</ext-link></comment><pub-id pub-id-type="doi">10.1145/3606043.3606069</pub-id></nlm-citation></ref><ref id="ref95"><label>95</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Can</surname><given-names>YS</given-names> </name><name name-style="western"><surname>Andr&#x00E9;</surname><given-names>E</given-names> </name></person-group><article-title>Performance exploration of RNN variants for recognizing daily life stress levels by using multimodal physiological signals</article-title><year>2023</year><month>10</month><day>9</day><access-date>2026-03-19</access-date><conf-name>ICMI &#x2019;23: Proceedings of the 25th International Conference on Multimodal Interaction</conf-name><conf-date>Oct 9, 2023</conf-date><conf-loc>Paris, France</conf-loc><fpage>481</fpage><lpage>487</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3577190">https://dl.acm.org/doi/proceedings/10.1145/3577190</ext-link></comment><pub-id pub-id-type="doi">10.1145/3577190.3614159</pub-id></nlm-citation></ref><ref id="ref96"><label>96</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Prajod</surname><given-names>P</given-names> </name><name name-style="western"><surname>Mahesh</surname><given-names>B</given-names> </name><name name-style="western"><surname>Andr&#x00E9;</surname><given-names>E</given-names> </name></person-group><article-title>Stressor type matters! 
&#x2014; exploring factors influencing cross-dataset generalizability of physiological stress detection</article-title><year>2024</year><month>11</month><day>4</day><access-date>2026-03-19</access-date><conf-name>ICMI &#x2019;24: Proceedings of the 26th International Conference on Multimodal Interaction</conf-name><conf-date>Nov 4, 2024</conf-date><conf-loc>San Jose, Costa Rica</conf-loc><fpage>508</fpage><lpage>517</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3678957">https://dl.acm.org/doi/proceedings/10.1145/3678957</ext-link></comment><pub-id pub-id-type="doi">10.1145/3678957.3685738</pub-id></nlm-citation></ref><ref id="ref97"><label>97</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Ganesan</surname><given-names>P</given-names> </name><name name-style="western"><surname>Thota</surname><given-names>YR</given-names> </name><name name-style="western"><surname>Shehata</surname><given-names>H</given-names> </name><name name-style="western"><surname>Nikoubin</surname><given-names>T</given-names> </name></person-group><article-title>TinyML based stress detection utilizing PPG signals: a lightweight approach for smart wearable devices</article-title><year>2025</year><month>06</month><day>30</day><access-date>2026-03-19</access-date><conf-name>Proceedings of the Great Lakes Symposium on VLSI 2025</conf-name><conf-date>Jun 30, 2025</conf-date><conf-loc>New Orleans, LA</conf-loc><fpage>941</fpage><lpage>946</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3716368">https://dl.acm.org/doi/proceedings/10.1145/3716368</ext-link></comment><pub-id pub-id-type="doi">10.1145/3716368.3735274</pub-id></nlm-citation></ref><ref id="ref98"><label>98</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name 
name-style="western"><surname>Sun</surname><given-names>X</given-names> </name><name name-style="western"><surname>Zhao</surname><given-names>L</given-names> </name><name name-style="western"><surname>Gao</surname><given-names>R</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>X</given-names> </name></person-group><article-title>Stress recognition based on the Markov transition field of electrodermal activity</article-title><year>2025</year><month>01</month><day>10</day><access-date>2026-03-19</access-date><conf-name>BIC &#x2019;25: Proceedings of the 2025 5th International Conference on Bioinformatics and Intelligent Computing</conf-name><conf-date>Jan 10, 2025</conf-date><conf-loc>Shenyang, China</conf-loc><fpage>467</fpage><lpage>472</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3724979">https://dl.acm.org/doi/proceedings/10.1145/3724979</ext-link></comment><pub-id pub-id-type="doi">10.1145/3724979.3725051</pub-id></nlm-citation></ref><ref id="ref99"><label>99</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Neigel</surname><given-names>P</given-names> </name><name name-style="western"><surname>Vargo</surname><given-names>A</given-names> </name><name name-style="western"><surname>Tag</surname><given-names>B</given-names> </name><name name-style="western"><surname>Kise</surname><given-names>K</given-names> </name></person-group><article-title>Using wearables to unobtrusively identify periods of stress in a real university environment</article-title><year>2024</year><month>10</month><day>5</day><access-date>2026-03-19</access-date><conf-name>ISWC &#x2019;24: Proceedings of the 2024 ACM International Symposium on Wearable Computers</conf-name><conf-date>Oct 5, 2024</conf-date><conf-loc>Melbourne, Australia</conf-loc><fpage>17</fpage><lpage>24</lpage><comment><ext-link ext-link-type="uri" 
xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3675095">https://dl.acm.org/doi/proceedings/10.1145/3675095</ext-link></comment><pub-id pub-id-type="doi">10.1145/3675095.3676608</pub-id></nlm-citation></ref><ref id="ref100"><label>100</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Pogliaghi</surname><given-names>A</given-names> </name><name name-style="western"><surname>Di Lascio</surname><given-names>E</given-names> </name><name name-style="western"><surname>Gashi</surname><given-names>S</given-names> </name><name name-style="western"><surname>Piciucco</surname><given-names>E</given-names> </name><name name-style="western"><surname>Santini</surname><given-names>S</given-names> </name><name name-style="western"><surname>Gjoreski</surname><given-names>M</given-names> </name></person-group><article-title>Multi-task learning for stress recognition</article-title><year>2022</year><month>09</month><day>11</day><access-date>2026-03-19</access-date><conf-name>Proceedings of the 2022 ACM International Joint Conference on Pervasive and Ubiquitous Computing and the 2022 ACM International Symposium on Wearable Computers</conf-name><conf-date>Sep 11, 2022</conf-date><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3544793">https://dl.acm.org/doi/proceedings/10.1145/3544793</ext-link></comment><pub-id pub-id-type="doi">10.1145/3544793.3563404</pub-id></nlm-citation></ref><ref id="ref101"><label>101</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Jaiswal</surname><given-names>D</given-names> </name><name name-style="western"><surname>Chatterjee</surname><given-names>D</given-names> </name><name name-style="western"><surname>B s</surname><given-names>M</given-names> </name><name name-style="western"><surname>Ramakrishnan</surname><given-names>RK</given-names> </name><name 
name-style="western"><surname>Pal</surname><given-names>A</given-names> </name></person-group><article-title>GSR based generic stress prediction system</article-title><conf-name>Proceedings of the 2023 ACM International Joint Conference on Pervasive and Ubiquitous Computing &#x0026; the 2023 ACM International Symposium on Wearable Computing</conf-name><conf-date>Oct 8, 2023</conf-date><pub-id pub-id-type="doi">10.1145/3594739.3610734</pub-id></nlm-citation></ref><ref id="ref102"><label>102</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Rashid</surname><given-names>N</given-names> </name><name name-style="western"><surname>Mortlock</surname><given-names>T</given-names> </name><name name-style="western"><surname>Faruque</surname><given-names>MAA</given-names> </name></person-group><article-title>Stress detection using context-aware sensor fusion from wearable devices</article-title><source>IEEE Internet Things J</source><year>2023</year><month>08</month><day>15</day><volume>10</volume><issue>16</issue><fpage>14114</fpage><lpage>14127</lpage><pub-id pub-id-type="doi">10.1109/JIOT.2023.3265768</pub-id></nlm-citation></ref><ref id="ref103"><label>103</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Narwat</surname><given-names>N</given-names> </name><name name-style="western"><surname>Kumar</surname><given-names>H</given-names> </name><name name-style="western"><surname>Jadon</surname><given-names>JS</given-names> </name><name name-style="western"><surname>Singh</surname><given-names>A</given-names> </name></person-group><article-title>Multi-sensory stress detection system</article-title><conf-name>2024 14th International Conference on Cloud Computing, Data Science &#x0026; Engineering (Confluence)</conf-name><conf-date>Jan 18-19, 2024</conf-date><conf-loc>Noida, India</conf-loc><fpage>685</fpage><lpage>689</lpage><pub-id 
pub-id-type="doi">10.1109/Confluence60223.2024.10463214</pub-id></nlm-citation></ref><ref id="ref104"><label>104</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Kafkov&#x00E1;</surname><given-names>J</given-names> </name><name name-style="western"><surname>Pirn&#x00ED;k</surname><given-names>R</given-names> </name><name name-style="western"><surname>Janota</surname><given-names>A</given-names> </name><name name-style="western"><surname>Kuch&#x00E1;r</surname><given-names>P</given-names> </name></person-group><article-title>Stress classification utilising AI studio</article-title><conf-name>2025 26th International Carpathian Control Conference (ICCC)</conf-name><conf-date>May 19-21, 2025</conf-date><conf-loc>Star&#x00FD; Smokovec, High Tatras, Slovakia</conf-loc><fpage>1</fpage><lpage>5</lpage><pub-id pub-id-type="doi">10.1109/ICCC65605.2025.11022862</pub-id></nlm-citation></ref><ref id="ref105"><label>105</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Lopez</surname><given-names>R</given-names> </name><name name-style="western"><surname>Shrestha</surname><given-names>A</given-names> </name><name name-style="western"><surname>Hickey</surname><given-names>K</given-names> </name><etal/></person-group><article-title>Screening students for stress using Fitbit data</article-title><conf-name>2024 IEEE International Conference on Big Data (BigData)</conf-name><conf-date>Dec 15-18, 2024</conf-date><conf-loc>Washington, DC</conf-loc><fpage>3931</fpage><lpage>3934</lpage><pub-id pub-id-type="doi">10.1109/BigData62323.2024.10825089</pub-id></nlm-citation></ref><ref id="ref106"><label>106</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Wilfred</surname><given-names>JJ</given-names> </name><name 
name-style="western"><surname>B</surname><given-names>P</given-names> </name><name name-style="western"><surname>Nirosha</surname><given-names>R</given-names> </name></person-group><article-title>Real-time stress detection and management using IoT sensors and virtual reality technology</article-title><conf-name>2025 8th International Conference on Trends in Electronics and Informatics (ICOEI)</conf-name><conf-date>Apr 24-25, 2025</conf-date><pub-id pub-id-type="doi">10.1109/ICOEI65986.2025.11013460</pub-id></nlm-citation></ref><ref id="ref107"><label>107</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Jaiswal</surname><given-names>D</given-names> </name><name name-style="western"><surname>Mukhopadhyay</surname><given-names>S</given-names> </name><name name-style="western"><surname>Sharma</surname><given-names>V</given-names> </name></person-group><article-title>TinyStressNet: on-device stress assessment with wearable sensors on edge devices</article-title><conf-name>2024 IEEE International Conference on Pervasive Computing and Communications Workshops and other Affiliated Events (PerCom Workshops)</conf-name><conf-date>Mar 11-15, 2024</conf-date><conf-loc>Biarritz, France</conf-loc><fpage>166</fpage><lpage>171</lpage><pub-id pub-id-type="doi">10.1109/PerComWorkshops59983.2024.10502631</pub-id></nlm-citation></ref><ref id="ref108"><label>108</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gait&#x00E1;n-Padilla</surname><given-names>M</given-names> </name><name name-style="western"><surname>M&#x00FA;nera</surname><given-names>M</given-names> </name><name name-style="western"><surname>Jos&#x00E9; Pontes</surname><given-names>M</given-names> </name><name name-style="western"><surname>Eduardo Vieira Segatto</surname><given-names>M</given-names> </name><name 
name-style="western"><surname>Cifuentes</surname><given-names>CA</given-names> </name><name name-style="western"><surname>Diaz</surname><given-names>CAR</given-names> </name></person-group><article-title>Development of a polymeric optical fiber sensor for stress estimation: a comparative analysis between physiological sensors</article-title><source>IEEE Sensors J</source><year>2024</year><month>10</month><day>15</day><volume>24</volume><issue>20</issue><fpage>32140</fpage><lpage>32149</lpage><pub-id pub-id-type="doi">10.1109/JSEN.2024.3435399</pub-id></nlm-citation></ref><ref id="ref109"><label>109</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Gupta</surname><given-names>R</given-names> </name><name name-style="western"><surname>Bhongade</surname><given-names>A</given-names> </name><name name-style="western"><surname>Gandhi</surname><given-names>TK</given-names> </name></person-group><article-title>Multimodal wearable sensors-based stress and affective states prediction model</article-title><conf-name>2023 9th International Conference on Advanced Computing and Communication Systems (ICACCS)</conf-name><conf-date>Mar 17-18, 2023</conf-date><conf-loc>Coimbatore, India</conf-loc><fpage>30</fpage><lpage>35</lpage><pub-id pub-id-type="doi">10.1109/ICACCS57279.2023.10112973</pub-id></nlm-citation></ref><ref id="ref110"><label>110</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Beierle</surname><given-names>F</given-names> </name><name name-style="western"><surname>Pryss</surname><given-names>R</given-names> </name></person-group><article-title>Automating the development of stress detection systems</article-title><conf-name>2023 Congress in Computer Science, Computer Engineering, &#x0026; Applied Computing (CSCE)</conf-name><conf-date>Jul 24-27, 2023</conf-date><conf-loc>Las Vegas, 
NV</conf-loc><fpage>2694</fpage><lpage>2696</lpage><pub-id pub-id-type="doi">10.1109/CSCE60160.2023.00432</pub-id></nlm-citation></ref><ref id="ref111"><label>111</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Masrur</surname><given-names>N</given-names> </name><name name-style="western"><surname>Halder</surname><given-names>N</given-names> </name><name name-style="western"><surname>Rashid</surname><given-names>S</given-names> </name><name name-style="western"><surname>Setu</surname><given-names>JH</given-names> </name><name name-style="western"><surname>Islam</surname><given-names>A</given-names> </name><name name-style="western"><surname>Ahmed</surname><given-names>T</given-names> </name></person-group><article-title>Performance analysis of ensemble and DNN models for decoding mental stress utilizing ECG-based wearable data fusion</article-title><conf-name>2024 IEEE International Black Sea Conference on Communications and Networking (BlackSeaCom)</conf-name><conf-date>Jun 24-27, 2024</conf-date><conf-loc>Tbilisi, Georgia</conf-loc><fpage>276</fpage><lpage>279</lpage><pub-id pub-id-type="doi">10.1109/BlackSeaCom61746.2024.10646297</pub-id></nlm-citation></ref><ref id="ref112"><label>112</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Sakanti</surname><given-names>MM</given-names> </name><name name-style="western"><surname>Siniaev</surname><given-names>V</given-names> </name><name name-style="western"><surname>Amaris</surname><given-names>A</given-names> </name><name name-style="western"><surname>Luo</surname><given-names>WJ</given-names> </name><name name-style="western"><surname>Kuncoro</surname><given-names>CBD</given-names> </name></person-group><article-title>Psychological stress classification using extreme gradient boosting algorithm</article-title><conf-name>2024 15th International Conference on Information 
and Communication Technology Convergence (ICTC)</conf-name><conf-date>Oct 16-18, 2024</conf-date><conf-loc>Jeju Island, Republic of Korea</conf-loc><fpage>946</fpage><lpage>950</lpage><pub-id pub-id-type="doi">10.1109/ICTC62082.2024.10827020</pub-id></nlm-citation></ref><ref id="ref113"><label>113</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Shedage</surname><given-names>PS</given-names> </name><name name-style="western"><surname>Pouriyeh</surname><given-names>S</given-names> </name><name name-style="western"><surname>Parizi</surname><given-names>RM</given-names> </name><name name-style="western"><surname>Han</surname><given-names>M</given-names> </name><name name-style="western"><surname>Sannino</surname><given-names>G</given-names> </name><name name-style="western"><surname>Dehbozorgi</surname><given-names>N</given-names> </name></person-group><article-title>Stress detection using multimodal physiological signals with machine learning from wearable devices</article-title><conf-name>2024 IEEE Symposium on Computers and Communications (ISCC)</conf-name><conf-date>Jun 26-29, 2024</conf-date><conf-loc>Paris, France</conf-loc><fpage>1</fpage><lpage>6</lpage><pub-id pub-id-type="doi">10.1109/ISCC61673.2024.10733703</pub-id></nlm-citation></ref><ref id="ref114"><label>114</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Gait&#x00E1;n-Padilla</surname><given-names>M</given-names> </name><name name-style="western"><surname>M&#x00FA;nera</surname><given-names>M</given-names> </name><name name-style="western"><surname>Cifuentes</surname><given-names>CA</given-names> </name><name name-style="western"><surname>Monteiro</surname><given-names>ME</given-names> </name><name name-style="western"><surname>Pontes</surname><given-names>MJ</given-names> </name><name 
name-style="western"><surname>Diaz</surname><given-names>CAR</given-names> </name></person-group><article-title>Stress classification using a low-cost optical fiber physiological sensor: a preliminary study</article-title><conf-name>2023 SBMO/IEEE MTT-S International Microwave and Optoelectronics Conference (IMOC)</conf-name><conf-date>Nov 5-9, 2023</conf-date><pub-id pub-id-type="doi">10.1109/IMOC57131.2023.10379658</pub-id></nlm-citation></ref><ref id="ref115"><label>115</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Tanwar</surname><given-names>R</given-names> </name><name name-style="western"><surname>Singh</surname><given-names>G</given-names> </name><name name-style="western"><surname>Pal</surname><given-names>PK</given-names> </name></person-group><article-title>FuSeR: fusion of wearables data for stress recognition using explainable artificial intelligence models</article-title><conf-name>2023 14th International Conference on Computing Communication and Networking Technologies (ICCCNT)</conf-name><conf-date>Jul 6-8, 2023</conf-date><conf-loc>Delhi, India</conf-loc><fpage>1</fpage><lpage>6</lpage><pub-id pub-id-type="doi">10.1109/ICCCNT56998.2023.10307589</pub-id></nlm-citation></ref><ref id="ref116"><label>116</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gullapalli</surname><given-names>BT</given-names> </name><name name-style="western"><surname>Nathan</surname><given-names>V</given-names> </name><name name-style="western"><surname>Rahman</surname><given-names>MM</given-names> </name><name name-style="western"><surname>Kuang</surname><given-names>J</given-names> </name><name name-style="western"><surname>Gao</surname><given-names>JA</given-names> </name></person-group><article-title>A framework for extracting heart rate variability features from earbud-PPG for stress detection</article-title><source>Annu 
Int Conf IEEE Eng Med Biol Soc</source><year>2024</year><month>07</month><volume>2024</volume><fpage>1</fpage><lpage>5</lpage><pub-id pub-id-type="doi">10.1109/EMBC53108.2024.10782088</pub-id><pub-id pub-id-type="medline">40039377</pub-id></nlm-citation></ref><ref id="ref117"><label>117</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Sadruddin</surname><given-names>S</given-names> </name><name name-style="western"><surname>Khairnar</surname><given-names>VD</given-names> </name><name name-style="western"><surname>Vora</surname><given-names>DR</given-names> </name></person-group><article-title>Machine learning based assessment of mental stress using wearable sensors</article-title><conf-name>2024 11th International Conference on Computing for Sustainable Global Development (INDIACom)</conf-name><conf-date>Feb 28 to Mar 1, 2024</conf-date><conf-loc>New Delhi, India</conf-loc><fpage>351</fpage><lpage>355</lpage><pub-id pub-id-type="doi">10.23919/INDIACom61295.2024.10498597</pub-id></nlm-citation></ref><ref id="ref118"><label>118</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Jahanjoo</surname><given-names>A</given-names> </name><name name-style="western"><surname>TaheriNejad</surname><given-names>N</given-names> </name><name name-style="western"><surname>Aminifar</surname><given-names>A</given-names> </name></person-group><article-title>High-accuracy stress detection using wrist-worn PPG sensors</article-title><conf-name>2024 IEEE International Symposium on Circuits and Systems (ISCAS)</conf-name><conf-date>Jul 2, 2024</conf-date><conf-loc>Singapore, Singapore</conf-loc><fpage>1</fpage><lpage>5</lpage><pub-id pub-id-type="doi">10.1109/ISCAS58744.2024.10558012</pub-id></nlm-citation></ref><ref id="ref119"><label>119</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name 
name-style="western"><surname>Parousidou</surname><given-names>V</given-names> </name><name name-style="western"><surname>Yfantidou</surname><given-names>S</given-names> </name><name name-style="western"><surname>Karagianni</surname><given-names>C</given-names> </name><name name-style="western"><surname>Vakali</surname><given-names>A</given-names> </name></person-group><article-title>Stress beats: a continuum of learning methods for personalized stress detection</article-title><conf-name>2023 IEEE International Conference on Web Intelligence and Intelligent Agent Technology (WI-IAT)</conf-name><conf-date>Oct 26-29, 2023</conf-date><conf-loc>Venice, Italy</conf-loc><fpage>40</fpage><lpage>47</lpage><pub-id pub-id-type="doi">10.1109/WI-IAT59888.2023.00012</pub-id></nlm-citation></ref><ref id="ref120"><label>120</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Karpagam</surname><given-names>GR</given-names> </name><name name-style="western"><surname>Vardhan V M</surname><given-names>H</given-names> </name><name name-style="western"><surname>K K</surname><given-names>K</given-names> </name><name name-style="western"><surname>P</surname><given-names>P</given-names> </name><name name-style="western"><surname>Ramesh</surname><given-names>P</given-names> </name><name name-style="western"><surname>Sathyendira B</surname><given-names>S</given-names> </name></person-group><article-title>Physiological data-based stress detection: from wrist sensors to cloud computing and user feedback integration</article-title><conf-name>2024 International Conference on Smart Systems for Electrical, Electronics, Communication and Computer Engineering (ICSSEECC)</conf-name><conf-date>Jun 28-29, 2024</conf-date><conf-loc>Coimbatore, India</conf-loc><fpage>386</fpage><lpage>391</lpage><pub-id pub-id-type="doi">10.1109/ICSSEECC61126.2024.10649521</pub-id></nlm-citation></ref><ref id="ref121"><label>121</label><nlm-citation 
citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Shikha</surname><given-names>S</given-names> </name><name name-style="western"><surname>Sethia</surname><given-names>D</given-names> </name><name name-style="western"><surname>Indu</surname><given-names>S</given-names> </name></person-group><article-title>Optimization of wearable biosensor data for stress classification using machine learning and explainable AI</article-title><source>IEEE Access</source><year>2024</year><access-date>2026-02-13</access-date><volume>12</volume><fpage>169310</fpage><lpage>169327</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=10684201">https://ieeexplore.ieee.org/stamp/stamp.jsp?arnumber=10684201</ext-link></comment><pub-id pub-id-type="doi">10.1109/ACCESS.2024.3463742</pub-id></nlm-citation></ref><ref id="ref122"><label>122</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Hasanpoor</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Tarvirdizadeh</surname><given-names>B</given-names> </name><name name-style="western"><surname>Alipour</surname><given-names>K</given-names> </name><name name-style="western"><surname>Ghamari</surname><given-names>M</given-names> </name></person-group><article-title>Wavelet-based analysis of photoplethysmogram for stress detection using convolutional neural networks</article-title><conf-name>2023 11th RSI International Conference on Robotics and Mechatronics (ICRoM)</conf-name><conf-date>Dec 19-21, 2023</conf-date><conf-loc>Tehran, Islamic Republic of Iran</conf-loc><fpage>501</fpage><lpage>506</lpage><pub-id pub-id-type="doi">10.1109/ICRoM60803.2023.10412512</pub-id></nlm-citation></ref><ref id="ref123"><label>123</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name 
name-style="western"><surname>Benita</surname><given-names>DS</given-names> </name><name name-style="western"><surname>Ebenezer</surname><given-names>AS</given-names> </name><name name-style="western"><surname>Susmitha</surname><given-names>L</given-names> </name><name name-style="western"><surname>Subathra</surname><given-names>MSP</given-names> </name><name name-style="western"><surname>Priya</surname><given-names>SJ</given-names> </name></person-group><article-title>Stress detection using CNN on the WESAD dataset</article-title><conf-name>2024 International Conference on Emerging Systems and Intelligent Computing (ESIC)</conf-name><conf-date>Feb 9-10, 2024</conf-date><conf-loc>Bhubaneswar, India</conf-loc><fpage>308</fpage><lpage>313</lpage><pub-id pub-id-type="doi">10.1109/ESIC60604.2024.10481604</pub-id></nlm-citation></ref><ref id="ref124"><label>124</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Hsu</surname><given-names>A</given-names> </name></person-group><article-title>Quantifying exam stress progressions using electrodermal activity and machine learning</article-title><conf-name>2023 IEEE 23rd International Conference on Bioinformatics and Bioengineering (BIBE)</conf-name><conf-date>Dec 4-6, 2023</conf-date><conf-loc>Dayton, OH</conf-loc><fpage>434</fpage><lpage>438</lpage><pub-id pub-id-type="doi">10.1109/BIBE60311.2023.00077</pub-id></nlm-citation></ref><ref id="ref125"><label>125</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Carmisciano</surname><given-names>L</given-names> </name><name name-style="western"><surname>Boschi</surname><given-names>T</given-names> </name><name name-style="western"><surname>Chiaromonte</surname><given-names>F</given-names> </name><name name-style="western"><surname>Delmastro</surname><given-names>F</given-names> </name><name 
name-style="western"><surname>Vandin</surname><given-names>A</given-names> </name></person-group><article-title>Investigating functional data analysis for wearable physiological sensor data in stress evaluation</article-title><conf-name>2024 IEEE Symposium on Computers and Communications (ISCC)</conf-name><conf-date>Jun 26-29, 2024</conf-date><conf-loc>Paris, France</conf-loc><fpage>1</fpage><lpage>6</lpage><pub-id pub-id-type="doi">10.1109/ISCC61673.2024.10733576</pub-id></nlm-citation></ref><ref id="ref126"><label>126</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Warrier</surname><given-names>LC</given-names> </name><name name-style="western"><surname>Ragesh</surname><given-names>GK</given-names> </name><name name-style="western"><surname>Ram Samarth</surname><given-names>BB</given-names> </name><name name-style="western"><surname>Gurumurthy</surname><given-names>K</given-names> </name></person-group><article-title>Privacy-preserved stress detection from wearables using federated learning</article-title><conf-name>2024 IEEE 5th India Council International Subsections Conference (INDISCON)</conf-name><conf-date>Aug 22-24, 2024</conf-date><conf-loc>Chandigarh, India</conf-loc><fpage>1</fpage><lpage>6</lpage><pub-id pub-id-type="doi">10.1109/INDISCON62179.2024.10744249</pub-id></nlm-citation></ref><ref id="ref127"><label>127</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Calbert</surname><given-names>L</given-names> </name><name name-style="western"><surname>Tonekaboni</surname><given-names>NH</given-names> </name></person-group><article-title>Temporal dynamics of classroom stress: insights from wearable sensors and machine learning</article-title><conf-name>2024 International Conference on Machine Learning and Applications (ICMLA)</conf-name><conf-date>Dec 18-20, 2024</conf-date><conf-loc>Miami, 
FL</conf-loc><fpage>377</fpage><lpage>384</lpage><pub-id pub-id-type="doi">10.1109/ICMLA61862.2024.00057</pub-id></nlm-citation></ref><ref id="ref128"><label>128</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kumar</surname><given-names>S</given-names> </name><name name-style="western"><surname>Raj Chauhan</surname><given-names>A</given-names> </name><name name-style="western"><surname>Kumar</surname><given-names>A</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>G</given-names> </name></person-group><article-title>Resp-BoostNet: mental stress detection from biomarkers measurable by smartwatches using boosting neural network technique</article-title><source>IEEE Access</source><year>2024</year><volume>12</volume><fpage>149861</fpage><lpage>149874</lpage><pub-id pub-id-type="doi">10.1109/ACCESS.2024.3461588</pub-id></nlm-citation></ref><ref id="ref129"><label>129</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Hasanpoor</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Rostami</surname><given-names>A</given-names> </name><name name-style="western"><surname>Tarvirdizadeh</surname><given-names>B</given-names> </name><name name-style="western"><surname>Alipour</surname><given-names>K</given-names> </name><name name-style="western"><surname>Ghamari</surname><given-names>M</given-names> </name></person-group><article-title>Real-time stress detection via photoplethysmogram signals: implementation of a combined continuous wavelet transform and convolutional neural network on resource-constrained microcontrollers</article-title><conf-name>2024 32nd International Conference on Electrical Engineering (ICEE)</conf-name><conf-date>May 14-16, 2024</conf-date><pub-id pub-id-type="doi">10.1109/ICEE63041.2024.10668302</pub-id></nlm-citation></ref><ref 
id="ref130"><label>130</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Le Tran Thuan</surname><given-names>T</given-names> </name><name name-style="western"><surname>Nguyen</surname><given-names>PK</given-names> </name><name name-style="western"><surname>Gia</surname><given-names>QN</given-names> </name><name name-style="western"><surname>Tran</surname><given-names>AT</given-names> </name><name name-style="western"><surname>Le</surname><given-names>QK</given-names> </name></person-group><article-title>Machine learning algorithms for stress level analysis based on skin surface temperature and skin conductance</article-title><conf-name>2024 IEEE 6th Eurasia Conference on Biomedical Engineering, Healthcare and Sustainability (ECBIOS)</conf-name><conf-date>Jun 14-16, 2024</conf-date><pub-id pub-id-type="doi">10.1109/ECBIOS61468.2024.10885479</pub-id></nlm-citation></ref><ref id="ref131"><label>131</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Fernandez</surname><given-names>J</given-names> </name><name name-style="western"><surname>Mart&#x00ED;nez</surname><given-names>R</given-names> </name><name name-style="western"><surname>Innocenti</surname><given-names>B</given-names> </name><name name-style="western"><surname>L&#x00F3;pez</surname><given-names>B</given-names> </name></person-group><article-title>Contribution of EEG signals for students&#x2019; stress detection</article-title><source>IEEE Trans Affective Comput</source><year>2025</year><volume>16</volume><issue>2</issue><fpage>1235</fpage><lpage>1246</lpage><pub-id pub-id-type="doi">10.1109/TAFFC.2024.3503995</pub-id></nlm-citation></ref><ref id="ref132"><label>132</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Tanwar</surname><given-names>R</given-names> </name><name 
name-style="western"><surname>Pal</surname><given-names>PK</given-names> </name><name name-style="western"><surname>Singh</surname><given-names>G</given-names> </name></person-group><article-title>Wearables based personalised stress recognition using signal processing and hybrid deep learning model</article-title><conf-name>2024 International Conference on Computer, Electronics, Electrical Engineering &#x0026; their Applications (IC2E3)</conf-name><conf-date>Jun 6-7, 2024</conf-date><conf-loc>Srinagar Garhwal, India</conf-loc><fpage>1</fpage><lpage>6</lpage><pub-id pub-id-type="doi">10.1109/IC2E362166.2024.10827149</pub-id></nlm-citation></ref><ref id="ref133"><label>133</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Huang</surname><given-names>M</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>H</given-names> </name><name name-style="western"><surname>Sun</surname><given-names>N</given-names> </name><etal/></person-group><article-title>Study of a hybrid CNN-SVM model for stress detection with automated heart rate variability feature extraction method</article-title><conf-name>2024 3rd International Conference on Health Big Data and Intelligent Healthcare (ICHIH)</conf-name><conf-date>Dec 13-15, 2024</conf-date><conf-loc>Zhuhai, China</conf-loc><fpage>316</fpage><lpage>319</lpage><pub-id pub-id-type="doi">10.1109/ICHIH63459.2024.11064910</pub-id></nlm-citation></ref><ref id="ref134"><label>134</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Oh</surname><given-names>K</given-names> </name><name name-style="western"><surname>Choi</surname><given-names>JK</given-names> </name><name name-style="western"><surname>Park</surname><given-names>H</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>S</given-names> </name></person-group><article-title>Personalized 
ensemble based stress detection using wearable sensor data</article-title><conf-name>2025 27th International Conference on Advanced Communications Technology (ICACT)</conf-name><conf-date>Feb 16-19, 2025</conf-date><conf-loc>Pyeong Chang, Korea, Republic of</conf-loc><fpage>470</fpage><lpage>475</lpage><pub-id pub-id-type="doi">10.23919/ICACT63878.2025.10936652</pub-id></nlm-citation></ref><ref id="ref135"><label>135</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Thapa</surname><given-names>B</given-names> </name><name name-style="western"><surname>Rivas</surname><given-names>M</given-names> </name><name name-style="western"><surname>Griffith</surname><given-names>H</given-names> </name><name name-style="western"><surname>Rathore</surname><given-names>H</given-names> </name></person-group><article-title>StressLLM: large language models for stress prediction via wearable sensor data</article-title><conf-name>2025 IEEE International Conference on Consumer Electronics (ICCE)</conf-name><conf-date>Jan 11-14, 2025</conf-date><conf-loc>Las Vegas, NV</conf-loc><fpage>1</fpage><lpage>6</lpage><pub-id pub-id-type="doi">10.1109/ICCE63647.2025.10929774</pub-id></nlm-citation></ref><ref id="ref136"><label>136</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Abdelfattah</surname><given-names>E</given-names> </name><name name-style="western"><surname>Joshi</surname><given-names>S</given-names> </name><name name-style="western"><surname>Tiwari</surname><given-names>S</given-names> </name></person-group><article-title>Machine and deep learning models for stress detection using multimodal physiological data</article-title><source>IEEE Access</source><year>2025</year><volume>13</volume><fpage>4597</fpage><lpage>4608</lpage><pub-id pub-id-type="doi">10.1109/ACCESS.2024.3525459</pub-id></nlm-citation></ref><ref 
id="ref137"><label>137</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Tsiampa</surname><given-names>K</given-names> </name><name name-style="western"><surname>Zhu</surname><given-names>L</given-names> </name><name name-style="western"><surname>Spachos</surname><given-names>P</given-names> </name><name name-style="western"><surname>Plagianakos</surname><given-names>VP</given-names> </name></person-group><article-title>Investigating feasibility of stress detection from social media content through wearables</article-title><conf-name>GLOBECOM 2023 - 2023 IEEE Global Communications Conference</conf-name><conf-date>Dec 4-8, 2023</conf-date><conf-loc>Kuala Lumpur, Malaysia</conf-loc><fpage>1173</fpage><lpage>1178</lpage><pub-id pub-id-type="doi">10.1109/GLOBECOM54140.2023.10437938</pub-id></nlm-citation></ref><ref id="ref138"><label>138</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Fazeli</surname><given-names>S</given-names> </name><name name-style="western"><surname>Levine</surname><given-names>L</given-names> </name><name name-style="western"><surname>Beikzadeh</surname><given-names>M</given-names> </name><etal/></person-group><article-title>A self-supervised framework for improved data-driven monitoring of stress via multi-modal passive sensing</article-title><conf-name>2023 IEEE International Conference on Digital Health (ICDH)</conf-name><conf-date>Jul 2-8, 2023</conf-date><conf-loc>Chicago, IL</conf-loc><fpage>177</fpage><lpage>183</lpage><pub-id pub-id-type="doi">10.1109/ICDH60066.2023.00033</pub-id></nlm-citation></ref><ref id="ref139"><label>139</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Subathra</surname><given-names>P</given-names> </name><name name-style="western"><surname>Malarvizhi</surname><given-names>S</given-names> 
</name></person-group><article-title>Autoencoder-based human stress detection system using biological signals</article-title><conf-name>2024 International Conference on Recent Advances in Electrical, Electronics, Ubiquitous Communication, and Computational Intelligence (RAEEUCCI)</conf-name><conf-date>Apr 17-18, 2024</conf-date><conf-loc>Chennai, India</conf-loc><fpage>1</fpage><lpage>7</lpage><pub-id pub-id-type="doi">10.1109/RAEEUCCI61380.2024.10547833</pub-id></nlm-citation></ref><ref id="ref140"><label>140</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Shikha</surname><given-names>S</given-names> </name><name name-style="western"><surname>Sethia</surname><given-names>D</given-names> </name><name name-style="western"><surname>Indu</surname><given-names>S</given-names> </name></person-group><article-title>CorLMI-fsa: an efficient feature selection approach for stress classification using physiological signals</article-title><conf-name>2025 Fifth International Conference on Advances in Electrical, Computing, Communication and Sustainable Technologies (ICAECT)</conf-name><conf-date>Jan 9-10, 2025</conf-date><conf-loc>Bhilai, India</conf-loc><fpage>1</fpage><lpage>7</lpage><pub-id pub-id-type="doi">10.1109/ICAECT63952.2025.10958862</pub-id></nlm-citation></ref><ref id="ref141"><label>141</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Andreas</surname><given-names>A</given-names> </name><name name-style="western"><surname>Mavromoustakis</surname><given-names>CX</given-names> </name><name name-style="western"><surname>Song</surname><given-names>H</given-names> </name><name name-style="western"><surname>Batalla</surname><given-names>JM</given-names> </name></person-group><article-title>Optimisation of CNN through transferable online knowledge for stress and sentiment classification</article-title><source>IEEE Trans 
Consumer Electron</source><year>2024</year><volume>70</volume><issue>1</issue><fpage>3088</fpage><lpage>3097</lpage><pub-id pub-id-type="doi">10.1109/TCE.2023.3319111</pub-id></nlm-citation></ref><ref id="ref142"><label>142</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kasnesis</surname><given-names>P</given-names> </name><name name-style="western"><surname>Chatzigeorgiou</surname><given-names>C</given-names> </name><name name-style="western"><surname>Feidakis</surname><given-names>M</given-names> </name><name name-style="western"><surname>Guti&#x00E9;rrez</surname><given-names>&#x00C1;</given-names> </name><name name-style="western"><surname>Patrikakis</surname><given-names>CZ</given-names> </name></person-group><article-title>TranSenseFusers: a temporal CNN-transformer neural network family for explainable PPG-based stress detection</article-title><source>Biomed Signal Process Control</source><year>2025</year><month>04</month><volume>102</volume><fpage>107248</fpage><pub-id pub-id-type="doi">10.1016/j.bspc.2024.107248</pub-id></nlm-citation></ref><ref id="ref143"><label>143</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ciharova</surname><given-names>M</given-names> </name><name name-style="western"><surname>Amarti</surname><given-names>K</given-names> </name><name name-style="western"><surname>van Breda</surname><given-names>W</given-names> </name><etal/></person-group><article-title>Machine-learning detection of stress severity expressed on a continuous scale using acoustic, verbal, visual, and physiological data: lessons learned</article-title><source>Front Psychiatry</source><year>2025</year><volume>16</volume><fpage>1548287</fpage><pub-id pub-id-type="doi">10.3389/fpsyt.2025.1548287</pub-id><pub-id pub-id-type="medline">40585547</pub-id></nlm-citation></ref><ref id="ref144"><label>144</label><nlm-citation 
citation-type="other"><person-group person-group-type="author"><name name-style="western"><surname>Darwish</surname><given-names>BA</given-names> </name><name name-style="western"><surname>Salem</surname><given-names>NM</given-names> </name><name name-style="western"><surname>Kareem</surname><given-names>G</given-names> </name><name name-style="western"><surname>Mahmoud</surname><given-names>LN</given-names> </name><name name-style="western"><surname>Sadek</surname><given-names>I</given-names> </name></person-group><article-title>Evaluating the potential of wearable technology in early stress detection: a multimodal approach</article-title><source>medRxiv</source><comment>Preprint posted online on  Jul 21, 2024</comment><pub-id pub-id-type="doi">10.1101/2024.07.19.24310732</pub-id></nlm-citation></ref><ref id="ref145"><label>145</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Nuamah</surname><given-names>J</given-names> </name></person-group><article-title>Effect of recurrent task-induced acute stress on task performance, vagally mediated heart rate variability, and task-evoked pupil response</article-title><source>Int J Psychophysiol</source><year>2024</year><month>04</month><volume>198</volume><fpage>112325</fpage><pub-id pub-id-type="doi">10.1016/j.ijpsycho.2024.112325</pub-id><pub-id pub-id-type="medline">38447701</pub-id></nlm-citation></ref><ref id="ref146"><label>146</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sa-nguannarm</surname><given-names>P</given-names> </name><name name-style="western"><surname>Elbasani</surname><given-names>E</given-names> </name><name name-style="western"><surname>Kim</surname><given-names>JD</given-names> </name></person-group><article-title>Human activity recognition for analyzing stress behavior based on 
Bi-LSTM</article-title><source>Technol Health Care</source><year>2023</year><month>09</month><day>15</day><volume>31</volume><issue>5</issue><fpage>1997</fpage><lpage>2007</lpage><pub-id pub-id-type="doi">10.3233/THC-235002</pub-id></nlm-citation></ref><ref id="ref147"><label>147</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Nelson</surname><given-names>BW</given-names> </name><name name-style="western"><surname>Harvie</surname><given-names>HMK</given-names> </name><name name-style="western"><surname>Jain</surname><given-names>B</given-names> </name><name name-style="western"><surname>Knight</surname><given-names>EL</given-names> </name><name name-style="western"><surname>Roos</surname><given-names>LE</given-names> </name><name name-style="western"><surname>Giuliano</surname><given-names>RJ</given-names> </name></person-group><article-title>Smartphone photoplethysmography pulse rate covaries with stress and anxiety during a digital acute social stressor</article-title><source>Psychosom Med</source><year>2023</year><month>09</month><day>1</day><volume>85</volume><issue>7</issue><fpage>577</fpage><lpage>584</lpage><pub-id pub-id-type="doi">10.1097/PSY.0000000000001178</pub-id><pub-id pub-id-type="medline">37409791</pub-id></nlm-citation></ref><ref id="ref148"><label>148</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Dahal</surname><given-names>K</given-names> </name><name name-style="western"><surname>Bogue-Jimenez</surname><given-names>B</given-names> </name><name name-style="western"><surname>Doblas</surname><given-names>A</given-names> </name></person-group><article-title>Global stress detection framework combining a reduced set of HRV features and random forest model</article-title><source>Sensors (Basel)</source><year>2023</year><month>05</month><day>31</day><volume>23</volume><issue>11</issue><fpage>5220</fpage><pub-id 
pub-id-type="doi">10.3390/s23115220</pub-id><pub-id pub-id-type="medline">37299947</pub-id></nlm-citation></ref><ref id="ref149"><label>149</label><nlm-citation citation-type="other"><person-group person-group-type="author"><name name-style="western"><surname>Aqajari</surname><given-names>SAH</given-names> </name><name name-style="western"><surname>Labbaf</surname><given-names>S</given-names> </name><name name-style="western"><surname>Tran</surname><given-names>PH</given-names> </name><etal/></person-group><article-title>Context-aware stress monitoring using wearable and mobile technologies in everyday settings</article-title><source>medRxiv</source><comment>Preprint posted online on  Dec 14, 2023</comment><pub-id pub-id-type="doi">10.1101/2023.04.20.23288181</pub-id></nlm-citation></ref><ref id="ref150"><label>150</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Jiao</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>X</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>C</given-names> </name><etal/></person-group><article-title>Feasibility study for detection of mental stress and depression using pulse rate variability metrics via various durations</article-title><source>Biomed Signal Process Control</source><year>2023</year><month>01</month><volume>79</volume><fpage>104145</fpage><pub-id pub-id-type="doi">10.1016/j.bspc.2022.104145</pub-id></nlm-citation></ref><ref id="ref151"><label>151</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lotfi</surname><given-names>F</given-names> </name><name name-style="western"><surname>Lotfi</surname><given-names>A</given-names> </name><name name-style="western"><surname>Lotfi</surname><given-names>M</given-names> </name><name 
name-style="western"><surname>Bjelica</surname><given-names>A</given-names> </name><name name-style="western"><surname>Bogdanovi&#x0107;</surname><given-names>Z</given-names> </name></person-group><article-title>Enhancing smart healthcare with female students&#x2019; stress and anxiety detection using machine learning</article-title><source>Psychol Health Med</source><year>2025</year><month>08</month><day>9</day><volume>30</volume><issue>7</issue><fpage>1465</fpage><lpage>1484</lpage><pub-id pub-id-type="doi">10.1080/13548506.2025.2484698</pub-id></nlm-citation></ref><ref id="ref152"><label>152</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Patan&#x00E8;</surname><given-names>G</given-names> </name><name name-style="western"><surname>Sorrenti</surname><given-names>A</given-names> </name><name name-style="western"><surname>Bellitto</surname><given-names>G</given-names> </name><name name-style="western"><surname>Palazzo</surname><given-names>S</given-names> </name></person-group><article-title>Continual learning strategies for personalized mental well-being monitoring from mobile sensing data</article-title><year>2025</year><month>10</month><day>27</day><conf-name>PILM &#x2019;25: Proceedings of the International Workshop on Personalized Incremental Learning in Medicine</conf-name><conf-date>Oct 27, 2025</conf-date><conf-loc>Dublin, Ireland</conf-loc><fpage>9</fpage><lpage>17</lpage><pub-id pub-id-type="doi">10.1145/3746259.3760432</pub-id></nlm-citation></ref><ref id="ref153"><label>153</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Subathra</surname><given-names>P</given-names> </name><name name-style="western"><surname>Malarvizhi</surname><given-names>S</given-names> </name><name name-style="western"><surname>Ferents Koni Jiavana</surname><given-names>K</given-names> </name><name 
name-style="western"><surname>Patil</surname><given-names>S</given-names> </name></person-group><article-title>A wearable electronic band for stress understanding using machine learning</article-title><source>IEEE Sensors J</source><year>2025</year><month>10</month><day>15</day><volume>25</volume><issue>20</issue><fpage>38639</fpage><lpage>38648</lpage><pub-id pub-id-type="doi">10.1109/JSEN.2025.3590380</pub-id></nlm-citation></ref><ref id="ref154"><label>154</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>van der Mee</surname><given-names>DJ</given-names> </name><name name-style="western"><surname>Koyuncu</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Lemmers-Jansen</surname><given-names>ILJ</given-names> </name></person-group><article-title>Are you stressed or just excited? What the Garmin Stress Score can say about your mood</article-title><source>Journal of Affective Disorders Reports</source><year>2025</year><month>07</month><volume>21</volume><fpage>100974</fpage><pub-id pub-id-type="doi">10.1016/j.jadr.2025.100974</pub-id></nlm-citation></ref><ref id="ref155"><label>155</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>De Angel</surname><given-names>V</given-names> </name><name name-style="western"><surname>Lewis</surname><given-names>S</given-names> </name><name name-style="western"><surname>White</surname><given-names>K</given-names> </name><etal/></person-group><article-title>Digital health tools for the passive monitoring of depression: a systematic review of methods</article-title><source>NPJ Digit Med</source><year>2022</year><month>01</month><day>11</day><volume>5</volume><issue>1</issue><fpage>3</fpage><pub-id pub-id-type="doi">10.1038/s41746-021-00548-8</pub-id><pub-id pub-id-type="medline">35017634</pub-id></nlm-citation></ref><ref 
id="ref156"><label>156</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Downes</surname><given-names>MJ</given-names> </name><name name-style="western"><surname>Brennan</surname><given-names>ML</given-names> </name><name name-style="western"><surname>Williams</surname><given-names>HC</given-names> </name><name name-style="western"><surname>Dean</surname><given-names>RS</given-names> </name></person-group><article-title>Development of a critical appraisal tool to assess the quality of cross-sectional studies (AXIS)</article-title><source>BMJ Open</source><year>2016</year><month>12</month><day>8</day><volume>6</volume><issue>12</issue><fpage>e011458</fpage><pub-id pub-id-type="doi">10.1136/bmjopen-2016-011458</pub-id><pub-id pub-id-type="medline">27932337</pub-id></nlm-citation></ref><ref id="ref157"><label>157</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Wells</surname><given-names>G</given-names> </name><name name-style="western"><surname>Shea</surname><given-names>B</given-names> </name><name name-style="western"><surname>O&#x2019;Connell</surname><given-names>D</given-names> </name><etal/></person-group><article-title>The Newcastle-Ottawa Scale (NOS) for assessing the quality of nonrandomized studies in meta-analysis</article-title><access-date>2026-02-13</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.researchgate.net/publication/261773681_The_Newcastle-Ottawa_Scale_NOS_for_Assessing_the_Quality_of_Non-Randomized_Studies_in_Meta-Analysis">https://www.researchgate.net/publication/261773681_The_Newcastle-Ottawa_Scale_NOS_for_Assessing_the_Quality_of_Non-Randomized_Studies_in_Meta-Analysis</ext-link></comment></nlm-citation></ref><ref id="ref158"><label>158</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Gagliardi</surname><given-names>AR</given-names> </name><name name-style="western"><surname>Berta</surname><given-names>W</given-names> </name><name name-style="western"><surname>Kothari</surname><given-names>A</given-names> </name><name name-style="western"><surname>Boyko</surname><given-names>J</given-names> </name><name name-style="western"><surname>Urquhart</surname><given-names>R</given-names> </name></person-group><article-title>Integrated knowledge translation (IKT) in health care: a scoping review</article-title><source>Implementation Sci</source><year>2016</year><month>02</month><volume>11</volume><issue>1</issue><fpage>38</fpage><pub-id pub-id-type="doi">10.1186/s13012-016-0399-1</pub-id></nlm-citation></ref><ref id="ref159"><label>159</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Shaheen</surname><given-names>F</given-names> </name><name name-style="western"><surname>Verma</surname><given-names>B</given-names> </name><name name-style="western"><surname>Asafuddoula</surname><given-names>M</given-names> </name></person-group><article-title>Impact of automatic feature extraction in deep learning architecture</article-title><conf-name>2016 International Conference on Digital Image Computing</conf-name><conf-date>Nov 30 to Dec 2, 2016</conf-date><conf-loc>Gold Coast, Australia</conf-loc><fpage>1</fpage><lpage>8</lpage><pub-id pub-id-type="doi">10.1109/DICTA.2016.7797053</pub-id></nlm-citation></ref><ref id="ref160"><label>160</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Allen</surname><given-names>AP</given-names> </name><name name-style="western"><surname>Kennedy</surname><given-names>PJ</given-names> </name><name name-style="western"><surname>Dockray</surname><given-names>S</given-names> </name><name name-style="western"><surname>Cryan</surname><given-names>JF</given-names> 
</name><name name-style="western"><surname>Dinan</surname><given-names>TG</given-names> </name><name name-style="western"><surname>Clarke</surname><given-names>G</given-names> </name></person-group><article-title>The Trier Social Stress Test: principles and practice</article-title><source>Neurobiol Stress</source><year>2017</year><month>02</month><volume>6</volume><fpage>113</fpage><lpage>126</lpage><pub-id pub-id-type="doi">10.1016/j.ynstr.2016.11.001</pub-id><pub-id pub-id-type="medline">28229114</pub-id></nlm-citation></ref><ref id="ref161"><label>161</label><nlm-citation citation-type="web"><article-title>WESAD (wearable stress and affect detection)</article-title><source>Kaggle</source><access-date>2023-06-29</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://ubicomp.eti.uni-siegen.de/home/datasets/icmi18/">https://ubicomp.eti.uni-siegen.de/home/datasets/icmi18/</ext-link></comment></nlm-citation></ref><ref id="ref162"><label>162</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Zhang</surname><given-names>P</given-names> </name><name name-style="western"><surname>Jung</surname><given-names>G</given-names> </name><name name-style="western"><surname>Alikhanov</surname><given-names>J</given-names> </name><name name-style="western"><surname>Ahmed</surname><given-names>U</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>U</given-names> </name></person-group><article-title>A reproducible stress prediction pipeline with mobile sensor data</article-title><source>Proc ACM Interact Mob Wearable Ubiquitous Technol</source><year>2024</year><month>08</month><day>22</day><volume>8</volume><issue>3</issue><fpage>1</fpage><lpage>35</lpage><pub-id pub-id-type="doi">10.1145/3678578</pub-id><pub-id pub-id-type="medline">40093941</pub-id></nlm-citation></ref><ref id="ref163"><label>163</label><nlm-citation citation-type="confproc"><person-group 
person-group-type="author"><name name-style="western"><surname>Patle</surname><given-names>A</given-names> </name><name name-style="western"><surname>Chouhan</surname><given-names>DS</given-names> </name></person-group><article-title>SVM kernel functions for classification</article-title><conf-name>2013 International Conference on Advances in Technology and Engineering (ICATE 2013)</conf-name><conf-date>Jan 23-25, 2013</conf-date><conf-loc>Mumbai</conf-loc><fpage>1</fpage><lpage>9</lpage><pub-id pub-id-type="doi">10.1109/ICAdTE.2013.6524743</pub-id></nlm-citation></ref><ref id="ref164"><label>164</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>YF</given-names> </name><name name-style="western"><surname>Kwok</surname><given-names>J</given-names> </name><name name-style="western"><surname>Zhou</surname><given-names>ZH</given-names> </name></person-group><article-title>Cost-sensitive semi-supervised support vector machine</article-title><source>AAAI</source><year>2010</year><month>07</month><day>3</day><volume>24</volume><issue>1</issue><fpage>500</fpage><lpage>505</lpage><pub-id pub-id-type="doi">10.1609/aaai.v24i1.7661</pub-id></nlm-citation></ref><ref id="ref165"><label>165</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ayeni</surname><given-names>JA</given-names> </name><collab>Department of Computer Sciences, Ajayi Crowther University, Oyo, Nigeria</collab></person-group><article-title>Convolutional neural network (CNN): the architecture and applications</article-title><source>Appl J Phys Sci</source><year>2022</year><month>12</month><day>30</day><volume>4</volume><issue>4</issue><fpage>42</fpage><lpage>50</lpage><pub-id pub-id-type="doi">10.31248/AJPS2022.085</pub-id></nlm-citation></ref><ref id="ref166"><label>166</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>de Arriba-P&#x00E9;rez</surname><given-names>F</given-names> </name><name name-style="western"><surname>Santos-Gago</surname><given-names>JM</given-names> </name><name name-style="western"><surname>Caeiro-Rodr&#x00ED;guez</surname><given-names>M</given-names> </name><name name-style="western"><surname>Ramos-Merino</surname><given-names>M</given-names> </name></person-group><article-title>Study of stress detection and proposal of stress-related features using commercial-off-the-shelf wrist wearables</article-title><source>J Ambient Intell Human Comput</source><year>2019</year><month>12</month><volume>10</volume><issue>12</issue><fpage>4925</fpage><lpage>4945</lpage><pub-id pub-id-type="doi">10.1007/s12652-019-01188-3</pub-id></nlm-citation></ref><ref id="ref167"><label>167</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Setz</surname><given-names>C</given-names> </name><name name-style="western"><surname>Arnrich</surname><given-names>B</given-names> </name><name name-style="western"><surname>Schumm</surname><given-names>J</given-names> </name><name name-style="western"><surname>La Marca</surname><given-names>R</given-names> </name><name name-style="western"><surname>Troster</surname><given-names>G</given-names> </name><name name-style="western"><surname>Ehlert</surname><given-names>U</given-names> </name></person-group><article-title>Discriminating stress from cognitive load using a wearable EDA device</article-title><source>IEEE Trans Inform Technol Biomed</source><year>2009</year><volume>14</volume><issue>2</issue><fpage>410</fpage><lpage>417</lpage><pub-id pub-id-type="doi">10.1109/TITB.2009.2036164</pub-id></nlm-citation></ref><ref id="ref168"><label>168</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Xu</surname><given-names>X</given-names> </name><name 
name-style="western"><surname>Liu</surname><given-names>X</given-names> </name><name name-style="western"><surname>Zhang</surname><given-names>H</given-names> </name><etal/></person-group><article-title>GLOBEM: cross-dataset generalization of longitudinal human behavior modeling</article-title><source>Proc ACM Interact Mob Wearable Ubiquitous Technol</source><year>2022</year><month>01</month><day>11</day><volume>6</volume><issue>4</issue><fpage>1</fpage><lpage>34</lpage><pub-id pub-id-type="doi">10.1145/3569485</pub-id></nlm-citation></ref><ref id="ref169"><label>169</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Sarker</surname><given-names>H</given-names> </name><name name-style="western"><surname>Tyburski</surname><given-names>M</given-names> </name><name name-style="western"><surname>Rahman</surname><given-names>MM</given-names> </name><etal/></person-group><article-title>Finding significant stress episodes in a discontinuous time series of rapidly varying mobile sensor data</article-title><year>2016</year><month>05</month><day>7</day><access-date>2026-03-19</access-date><conf-name>CHI &#x2019;16: Proceedings of the 2016 CHI Conference on Human Factors in Computing Systems</conf-name><conf-date>May 7, 2016</conf-date><conf-loc>San Jose, CA</conf-loc><fpage>4489</fpage><lpage>4501</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/2858036">https://dl.acm.org/doi/proceedings/10.1145/2858036</ext-link></comment><pub-id pub-id-type="doi">10.1145/2858036.2858218</pub-id></nlm-citation></ref><ref id="ref170"><label>170</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Perini</surname><given-names>R</given-names> </name><name name-style="western"><surname>Veicsteinas</surname><given-names>A</given-names> </name></person-group><article-title>Heart rate variability and 
autonomic activity at rest and during exercise in various physiological conditions</article-title><source>Eur J Appl Physiol</source><year>2003</year><month>10</month><volume>90</volume><issue>3-4</issue><fpage>317</fpage><lpage>325</lpage><pub-id pub-id-type="doi">10.1007/s00421-003-0953-9</pub-id><pub-id pub-id-type="medline">13680241</pub-id></nlm-citation></ref><ref id="ref171"><label>171</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Mishra</surname><given-names>V</given-names> </name><name name-style="western"><surname>Hao</surname><given-names>T</given-names> </name><name name-style="western"><surname>Sun</surname><given-names>S</given-names> </name><etal/></person-group><article-title>Investigating the role of context in perceived stress detection in the wild</article-title><year>2018</year><month>10</month><day>8</day><access-date>2026-03-19</access-date><conf-name>UbiComp &#x2019;18: Proceedings of the 2018 ACM International Joint Conference and 2018 International Symposium on Pervasive and Ubiquitous Computing and Wearable Computers</conf-name><conf-date>Oct 8, 2018</conf-date><conf-loc>Singapore</conf-loc><fpage>1708</fpage><lpage>1716</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3267305">https://dl.acm.org/doi/proceedings/10.1145/3267305</ext-link></comment><pub-id pub-id-type="doi">10.1145/3267305.3267537</pub-id></nlm-citation></ref><ref id="ref172"><label>172</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>M&#x00F6;ller</surname><given-names>A</given-names> </name><name name-style="western"><surname>Kranz</surname><given-names>M</given-names> </name><name name-style="western"><surname>Schmid</surname><given-names>B</given-names> </name><name name-style="western"><surname>Roalter</surname><given-names>L</given-names> </name><name 
name-style="western"><surname>Diewald</surname><given-names>S</given-names> </name></person-group><article-title>Investigating self-reporting behavior in long-term studies</article-title><year>2013</year><month>04</month><day>27</day><access-date>2026-03-19</access-date><conf-name>CHI &#x2019;13: Proceedings of the SIGCHI Conference on Human Factors in Computing Systems</conf-name><conf-date>Apr 27, 2013</conf-date><conf-loc>Paris, France</conf-loc><fpage>2931</fpage><lpage>2940</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/2470654">https://dl.acm.org/doi/proceedings/10.1145/2470654</ext-link></comment><pub-id pub-id-type="doi">10.1145/2470654.2481406</pub-id></nlm-citation></ref><ref id="ref173"><label>173</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Fass-Holmes</surname><given-names>B</given-names> </name></person-group><article-title>Survey fatigue&#x2014;what is its role in undergraduates&#x2019; survey participation and response rates?</article-title><source>J Interdiscip Stud Educ</source><year>2022</year><access-date>2026-02-13</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://eric.ed.gov/?id=EJ1344904">https://eric.ed.gov/?id=EJ1344904</ext-link></comment></nlm-citation></ref><ref id="ref174"><label>174</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wen</surname><given-names>CKF</given-names> </name><name name-style="western"><surname>Schneider</surname><given-names>S</given-names> </name><name name-style="western"><surname>Stone</surname><given-names>AA</given-names> </name><name name-style="western"><surname>Spruijt-Metz</surname><given-names>D</given-names> </name></person-group><article-title>Compliance with mobile ecological momentary assessment protocols in children and adolescents: a systematic review and 
meta-analysis</article-title><source>J Med Internet Res</source><year>2017</year><month>04</month><day>26</day><volume>19</volume><issue>4</issue><fpage>e132</fpage><pub-id pub-id-type="doi">10.2196/jmir.6641</pub-id><pub-id pub-id-type="medline">28446418</pub-id></nlm-citation></ref><ref id="ref175"><label>175</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Riley</surname><given-names>RD</given-names> </name><name name-style="western"><surname>Ensor</surname><given-names>J</given-names> </name><name name-style="western"><surname>Snell</surname><given-names>KIE</given-names> </name><etal/></person-group><article-title>Importance of sample size on the quality and utility of AI-based prediction models for healthcare</article-title><source>Lancet Digit Health</source><year>2025</year><month>06</month><volume>7</volume><issue>6</issue><fpage>100857</fpage><pub-id pub-id-type="doi">10.1016/j.landig.2025.01.013</pub-id><pub-id pub-id-type="medline">40461350</pub-id></nlm-citation></ref><ref id="ref176"><label>176</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kaplan</surname><given-names>RM</given-names> </name><name name-style="western"><surname>Chambers</surname><given-names>DA</given-names> </name><name name-style="western"><surname>Glasgow</surname><given-names>RE</given-names> </name></person-group><article-title>Big data and large sample size: a cautionary note on the potential for bias</article-title><source>Clinical Translational Sci</source><year>2014</year><month>08</month><access-date>2026-03-19</access-date><volume>7</volume><issue>4</issue><fpage>342</fpage><lpage>346</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://ascpt.onlinelibrary.wiley.com/toc/17528062/7/4">https://ascpt.onlinelibrary.wiley.com/toc/17528062/7/4</ext-link></comment><pub-id 
pub-id-type="doi">10.1111/cts.12178</pub-id></nlm-citation></ref><ref id="ref177"><label>177</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Schmidt</surname><given-names>P</given-names> </name><name name-style="western"><surname>Reiss</surname><given-names>A</given-names> </name><name name-style="western"><surname>Duerichen</surname><given-names>R</given-names> </name><name name-style="western"><surname>Marberger</surname><given-names>C</given-names> </name><name name-style="western"><surname>Van Laerhoven</surname><given-names>K</given-names> </name></person-group><article-title>Introducing WESAD, a multimodal dataset for wearable stress and affect detection</article-title><year>2018</year><month>10</month><day>2</day><access-date>2026-03-19</access-date><conf-name>ICMI &#x2019;18: Proceedings of the 20th ACM International Conference on Multimodal Interaction</conf-name><conf-date>Oct 2, 2018</conf-date><conf-loc>Boulder, CO</conf-loc><fpage>400</fpage><lpage>408</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://dl.acm.org/doi/proceedings/10.1145/3242969">https://dl.acm.org/doi/proceedings/10.1145/3242969</ext-link></comment><pub-id pub-id-type="doi">10.1145/3242969.3242985</pub-id></nlm-citation></ref><ref id="ref178"><label>178</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Xu</surname><given-names>X</given-names> </name><name name-style="western"><surname>Chikersal</surname><given-names>P</given-names> </name><name name-style="western"><surname>Doryab</surname><given-names>A</given-names> </name><etal/></person-group><article-title>Leveraging routine behavior and contextually-filtered features for depression detection among college students</article-title><source>Proc ACM Interact Mob Wearable Ubiquitous 
Technol</source><year>2019</year><month>09</month><day>9</day><volume>3</volume><issue>3</issue><fpage>1</fpage><lpage>33</lpage><pub-id pub-id-type="doi">10.1145/3351274</pub-id></nlm-citation></ref><ref id="ref179"><label>179</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Xu</surname><given-names>X</given-names> </name><name name-style="western"><surname>Chikersal</surname><given-names>P</given-names> </name><name name-style="western"><surname>Dutcher</surname><given-names>JM</given-names> </name><etal/></person-group><article-title>Leveraging collaborative-filtering for personalized behavior modeling: a case study of depression detection among college students</article-title><source>Proc ACM Interact Mob Wearable Ubiquitous Technol</source><year>2021</year><month>03</month><day>19</day><volume>5</volume><issue>1</issue><fpage>1</fpage><lpage>27</lpage><pub-id pub-id-type="doi">10.1145/3448107</pub-id></nlm-citation></ref><ref id="ref180"><label>180</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Salmasi</surname><given-names>V</given-names> </name><name name-style="western"><surname>Lii</surname><given-names>TR</given-names> </name><name name-style="western"><surname>Humphreys</surname><given-names>K</given-names> </name><name name-style="western"><surname>Reddy</surname><given-names>V</given-names> </name><name name-style="western"><surname>Mackey</surname><given-names>SC</given-names> </name></person-group><article-title>A literature review of the impact of exclusion criteria on generalizability of clinical trial findings to patients with chronic pain</article-title><source>PR9</source><year>2022</year><volume>7</volume><issue>6</issue><fpage>e1050</fpage><pub-id pub-id-type="doi">10.1097/PR9.0000000000001050</pub-id></nlm-citation></ref><ref id="ref181"><label>181</label><nlm-citation 
citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Humphreys</surname><given-names>K</given-names> </name></person-group><article-title>A review of the impact of exclusion criteria on the generalizability of schizophrenia treatment research</article-title><source>Clin Schizophr Relat Psychoses</source><year>2017</year><volume>11</volume><issue>1</issue><fpage>49</fpage><lpage>57</lpage><pub-id pub-id-type="doi">10.3371/1935-1232-11.1.49</pub-id><pub-id pub-id-type="medline">28548580</pub-id></nlm-citation></ref><ref id="ref182"><label>182</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wong</surname><given-names>JJ</given-names> </name><name name-style="western"><surname>Jones</surname><given-names>N</given-names> </name><name name-style="western"><surname>Timko</surname><given-names>C</given-names> </name><name name-style="western"><surname>Humphreys</surname><given-names>K</given-names> </name></person-group><article-title>Exclusion criteria and generalizability in bipolar disorder treatment trials</article-title><source>Contemp Clin Trials Commun</source><year>2018</year><month>03</month><volume>9</volume><fpage>130</fpage><lpage>134</lpage><pub-id pub-id-type="doi">10.1016/j.conctc.2018.01.009</pub-id></nlm-citation></ref><ref id="ref183"><label>183</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Alegr&#x00ED;a</surname><given-names>M</given-names> </name><name name-style="western"><surname>NeMoyer</surname><given-names>A</given-names> </name><name name-style="western"><surname>Falg&#x00E0;s Bagu&#x00E9;</surname><given-names>I</given-names> </name><name name-style="western"><surname>Wang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Alvarez</surname><given-names>K</given-names> </name></person-group><article-title>Social 
determinants of mental health: where we are and where we need to go</article-title><source>Curr Psychiatry Rep</source><year>2018</year><month>09</month><day>17</day><volume>20</volume><issue>11</issue><fpage>95</fpage><pub-id pub-id-type="doi">10.1007/s11920-018-0969-9</pub-id><pub-id pub-id-type="medline">30221308</pub-id></nlm-citation></ref><ref id="ref184"><label>184</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>McEwen</surname><given-names>BS</given-names> </name><name name-style="western"><surname>Gianaros</surname><given-names>PJ</given-names> </name></person-group><article-title>Central role of the brain in stress and adaptation: links to socioeconomic status, health, and disease</article-title><source>Ann N Y Acad Sci</source><year>2010</year><month>02</month><volume>1186</volume><issue>1</issue><fpage>190</fpage><lpage>222</lpage><pub-id pub-id-type="doi">10.1111/j.1749-6632.2009.05331.x</pub-id><pub-id pub-id-type="medline">20201874</pub-id></nlm-citation></ref><ref id="ref185"><label>185</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Jackson</surname><given-names>RW</given-names> </name><name name-style="western"><surname>Treiber</surname><given-names>FA</given-names> </name><name name-style="western"><surname>Turner</surname><given-names>JR</given-names> </name><name name-style="western"><surname>Davis</surname><given-names>H</given-names> </name><name name-style="western"><surname>Strong</surname><given-names>WB</given-names> </name></person-group><article-title>Effects of race, sex, and socioeconomic status upon cardiovascular stress responsivity and recovery in youth</article-title><source>Int J Psychophysiol</source><year>1999</year><month>01</month><volume>31</volume><issue>2</issue><fpage>111</fpage><lpage>119</lpage><pub-id 
pub-id-type="doi">10.1016/S0167-8760(98)00044-0</pub-id></nlm-citation></ref><ref id="ref186"><label>186</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Braveman</surname><given-names>P</given-names> </name><name name-style="western"><surname>Egerter</surname><given-names>S</given-names> </name><name name-style="western"><surname>Williams</surname><given-names>DR</given-names> </name></person-group><article-title>The social determinants of health: coming of age</article-title><source>Annu Rev Public Health</source><year>2011</year><volume>32</volume><issue>1</issue><fpage>381</fpage><lpage>398</lpage><pub-id pub-id-type="doi">10.1146/annurev-publhealth-031210-101218</pub-id><pub-id pub-id-type="medline">21091195</pub-id></nlm-citation></ref><ref id="ref187"><label>187</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Chien</surname><given-names>WS</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>CC</given-names> </name></person-group><article-title>Understanding missing data bias in longitudinal mental stress detection</article-title><conf-name>2024 IEEE 20th International Conference on Body Sensor Networks (BSN)</conf-name><conf-date>Oct 15-17, 2024</conf-date><conf-loc>Chicago, IL</conf-loc><fpage>1</fpage><lpage>4</lpage><pub-id pub-id-type="doi">10.1109/BSN63547.2024.10780500</pub-id></nlm-citation></ref><ref id="ref188"><label>188</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>McCombe</surname><given-names>N</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>S</given-names> </name><name name-style="western"><surname>Ding</surname><given-names>X</given-names> </name><etal/></person-group><article-title>Practical strategies for extreme missing data imputation in dementia 
diagnosis</article-title><source>IEEE J Biomed Health Inform</source><year>2021</year><volume>26</volume><issue>2</issue><fpage>818</fpage><lpage>827</lpage><pub-id pub-id-type="doi">10.1109/JBHI.2021.3098511</pub-id></nlm-citation></ref><ref id="ref189"><label>189</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ibrahim</surname><given-names>JG</given-names> </name><name name-style="western"><surname>Molenberghs</surname><given-names>G</given-names> </name></person-group><article-title>Missing data methods in longitudinal studies: a review</article-title><source>TEST (Madr)</source><year>2009</year><month>05</month><volume>18</volume><issue>1</issue><fpage>1</fpage><lpage>43</lpage><pub-id pub-id-type="doi">10.1007/s11749-009-0138-x</pub-id></nlm-citation></ref><ref id="ref190"><label>190</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Abd-Alrazaq</surname><given-names>A</given-names> </name><name name-style="western"><surname>Alajlani</surname><given-names>M</given-names> </name><name name-style="western"><surname>Ahmad</surname><given-names>R</given-names> </name><etal/></person-group><article-title>The performance of wearable AI in detecting stress among students: systematic review and meta-analysis</article-title><source>J Med Internet Res</source><year>2024</year><month>01</month><day>31</day><volume>26</volume><fpage>e52622</fpage><pub-id pub-id-type="doi">10.2196/52622</pub-id><pub-id pub-id-type="medline">38294846</pub-id></nlm-citation></ref><ref id="ref191"><label>191</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gedam</surname><given-names>S</given-names> </name><name name-style="western"><surname>Paul</surname><given-names>S</given-names> </name></person-group><article-title>A review on mental stress detection using wearable sensors 
and machine learning techniques</article-title><source>IEEE Access</source><year>2021</year><volume>9</volume><fpage>84045</fpage><lpage>84066</lpage><pub-id pub-id-type="doi">10.1109/ACCESS.2021.3085502</pub-id></nlm-citation></ref><ref id="ref192"><label>192</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Giannakakis</surname><given-names>G</given-names> </name><name name-style="western"><surname>Grigoriadis</surname><given-names>D</given-names> </name><name name-style="western"><surname>Giannakaki</surname><given-names>K</given-names> </name><name name-style="western"><surname>Simantiraki</surname><given-names>O</given-names> </name><name name-style="western"><surname>Roniotis</surname><given-names>A</given-names> </name><name name-style="western"><surname>Tsiknakis</surname><given-names>M</given-names> </name></person-group><article-title>Review on psychological stress detection using biosignals</article-title><source>IEEE Trans Affective Comput</source><year>2022</year><month>01</month><day>1</day><volume>13</volume><issue>1</issue><fpage>440</fpage><lpage>460</lpage><pub-id pub-id-type="doi">10.1109/TAFFC.2019.2927337</pub-id></nlm-citation></ref><ref id="ref193"><label>193</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hickey</surname><given-names>BA</given-names> </name><name name-style="western"><surname>Chalmers</surname><given-names>T</given-names> </name><name name-style="western"><surname>Newton</surname><given-names>P</given-names> </name><etal/></person-group><article-title>Smart devices and wearable technologies to detect and monitor mental health conditions and stress: a systematic review</article-title><source>Sensors (Basel)</source><year>2021</year><month>05</month><day>16</day><volume>21</volume><issue>10</issue><fpage>3461</fpage><pub-id pub-id-type="doi">10.3390/s21103461</pub-id><pub-id 
pub-id-type="medline">34065620</pub-id></nlm-citation></ref><ref id="ref194"><label>194</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Shanmugasundaram</surname><given-names>G</given-names> </name><name name-style="western"><surname>Yazhini</surname><given-names>S</given-names> </name><name name-style="western"><surname>Hemapratha</surname><given-names>E</given-names> </name><name name-style="western"><surname>Nithya</surname><given-names>S</given-names> </name></person-group><article-title>A comprehensive review on stress detection techniques</article-title><conf-name>2019 IEEE International Conference on System, Computation, Automation and Networking (ICSCAN)</conf-name><conf-date>Mar 29-30, 2019</conf-date><conf-loc>Pondicherry, India</conf-loc><fpage>1</fpage><lpage>6</lpage><pub-id pub-id-type="doi">10.1109/ICSCAN.2019.8878795</pub-id></nlm-citation></ref><ref id="ref195"><label>195</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Onnela</surname><given-names>JP</given-names> </name></person-group><article-title>Opportunities and challenges in the collection and analysis of digital phenotyping data</article-title><source>Neuropsychopharmacology</source><year>2021</year><month>01</month><volume>46</volume><issue>1</issue><fpage>45</fpage><lpage>54</lpage><pub-id pub-id-type="doi">10.1038/s41386-020-0771-3</pub-id><pub-id pub-id-type="medline">32679583</pub-id></nlm-citation></ref><ref id="ref196"><label>196</label><nlm-citation citation-type="other"><person-group person-group-type="author"><name name-style="western"><surname>Xu</surname><given-names>X</given-names> </name><name name-style="western"><surname>Zhang</surname><given-names>H</given-names> </name><name name-style="western"><surname>Sefidgar</surname><given-names>Y</given-names> </name><etal/></person-group><article-title>GLOBEM dataset: multi-year 
datasets for longitudinal human behavior modeling generalization</article-title><source>arXiv</source><access-date>2024-10-03</access-date><comment>Preprint posted online on Nov 4, 2023</comment><comment><ext-link ext-link-type="uri" xlink:href="http://arxiv.org/abs/2211.02733">http://arxiv.org/abs/2211.02733</ext-link></comment></nlm-citation></ref><ref id="ref197"><label>197</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Gjoreski</surname><given-names>M</given-names> </name><name name-style="western"><surname>Gjoreski</surname><given-names>H</given-names> </name><name name-style="western"><surname>Lu&#x0161;trek</surname><given-names>M</given-names> </name><name name-style="western"><surname>Gams</surname><given-names>M</given-names> </name></person-group><article-title>Continuous stress detection using a wrist device: in laboratory and real life</article-title><year>2016</year><conf-name>UbiComp &#x2019;16: Proceedings of the 2016 ACM International Joint Conference on Pervasive and Ubiquitous Computing: Adjunct</conf-name><conf-date>Sep 12-16, 2016</conf-date><conf-loc>Heidelberg, Germany</conf-loc><fpage>1185</fpage><lpage>1193</lpage><pub-id pub-id-type="doi">10.1145/2968219.2968306</pub-id></nlm-citation></ref></ref-list><app-group><supplementary-material id="app1"><label>Multimedia Appendix 1</label><p>Search terms and phrases.</p><media xlink:href="mhealth_v14i1e64144_app1.docx" xlink:title="DOCX File, 38 KB"/></supplementary-material><supplementary-material id="app2"><label>Multimedia Appendix 2</label><p>Quality assessment scoring details.</p><media xlink:href="mhealth_v14i1e64144_app2.docx" xlink:title="DOCX File, 38 KB"/></supplementary-material><supplementary-material id="app3"><label>Multimedia Appendix 3</label><p>Quality scores by paper.</p><media xlink:href="mhealth_v14i1e64144_app3.docx" xlink:title="DOCX File, 60 KB"/></supplementary-material><supplementary-material 
id="app4"><label>Multimedia Appendix 4</label><p>Study key and publication information.</p><media xlink:href="mhealth_v14i1e64144_app4.docx" xlink:title="DOCX File, 168 KB"/></supplementary-material><supplementary-material id="app5"><label>Checklist 1</label><p>PRISMA-ScR checklist.</p><media xlink:href="mhealth_v14i1e64144_app5.docx" xlink:title="DOCX File, 250 KB"/></supplementary-material></app-group></back></article>