<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article xmlns:xlink="http://www.w3.org/1999/xlink" article-type="research-article" dtd-version="2.0">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JMIR Human Factors</journal-id>
      <journal-id journal-id-type="nlm-ta">JMIR Hum Factors</journal-id>
      <journal-title>JMIR Human Factors</journal-title>
      <issn pub-type="epub">2292-9495</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v9i2e34606</article-id>
      <article-id pub-id-type="pmid">35475781</article-id>
      <article-id pub-id-type="doi">10.2196/34606</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Original Paper</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Original Paper</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Designing Tangibles to Support Emotion Logging for Older Adults: Development and Usability Study</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Kushniruk</surname>
            <given-names>Andre</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Joseph</surname>
            <given-names>Amanda</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Stringer</surname>
            <given-names>Eleah</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author" corresp="yes" equal-contrib="yes">
          <name name-style="western">
            <surname>Gooch</surname>
            <given-names>Daniel</given-names>
          </name>
          <xref rid="aff1" ref-type="aff">1</xref>
          <address>
            <institution>School of Computing and Communications</institution>
            <institution>The Open University</institution>
            <addr-line>Walton Hall</addr-line>
            <addr-line>Milton Keynes, MK7 6AA</addr-line>
            <country>United Kingdom</country>
            <phone>44 1908858234</phone>
            <email>Daniel.Gooch@open.ac.uk</email>
          </address>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-1936-3103</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author">
          <name name-style="western">
            <surname>Mehta</surname>
            <given-names>Vikram</given-names>
          </name>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-2382-7214</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>Stuart</surname>
            <given-names>Avelie</given-names>
          </name>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-7711-6149</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>Katz</surname>
            <given-names>Dmitri</given-names>
          </name>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0003-1345-7539</ext-link>
        </contrib>
        <contrib id="contrib5" contrib-type="author">
          <name name-style="western">
            <surname>Bennasar</surname>
            <given-names>Mohamed</given-names>
          </name>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-8308-1632</ext-link>
        </contrib>
        <contrib id="contrib6" contrib-type="author">
          <name name-style="western">
            <surname>Levine</surname>
            <given-names>Mark</given-names>
          </name>
          <xref rid="aff3" ref-type="aff">3</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-5696-6021</ext-link>
        </contrib>
        <contrib id="contrib7" contrib-type="author">
          <name name-style="western">
            <surname>Bandara</surname>
            <given-names>Arosha</given-names>
          </name>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-8974-0555</ext-link>
        </contrib>
        <contrib id="contrib8" contrib-type="author">
          <name name-style="western">
            <surname>Nuseibeh</surname>
            <given-names>Bashar</given-names>
          </name>
          <xref rid="aff1" ref-type="aff">1</xref>
          <xref rid="aff4" ref-type="aff">4</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-3476-053X</ext-link>
        </contrib>
        <contrib id="contrib9" contrib-type="author">
          <name name-style="western">
            <surname>Bennaceur</surname>
            <given-names>Amel</given-names>
          </name>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-6124-9622</ext-link>
        </contrib>
        <contrib id="contrib10" contrib-type="author">
          <name name-style="western">
            <surname>Price</surname>
            <given-names>Blaine</given-names>
          </name>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-2247-9804</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>School of Computing and Communications</institution>
        <institution>The Open University</institution>
        <addr-line>Milton Keynes</addr-line>
        <country>United Kingdom</country>
      </aff>
      <aff id="aff2">
        <label>2</label>
        <institution>Department of Psychology</institution>
        <institution>University of Exeter</institution>
        <addr-line>Exeter</addr-line>
        <country>United Kingdom</country>
      </aff>
      <aff id="aff3">
        <label>3</label>
        <institution>Department of Psychology</institution>
        <institution>Lancaster University</institution>
        <addr-line>Lancaster</addr-line>
        <country>United Kingdom</country>
      </aff>
      <aff id="aff4">
        <label>4</label>
        <institution>Lero, the Science Foundation Ireland Research Centre for Software</institution>
        <institution>University of Limerick</institution>
        <addr-line>Limerick</addr-line>
        <country>Ireland</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Daniel Gooch <email>Daniel.Gooch@open.ac.uk</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <season>Apr-Jun</season>
        <year>2022</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>27</day>
        <month>4</month>
        <year>2022</year>
      </pub-date>
      <volume>9</volume>
      <issue>2</issue>
      <elocation-id>e34606</elocation-id>
      <history>
        <date date-type="received">
          <day>1</day>
          <month>11</month>
          <year>2021</year>
        </date>
        <date date-type="rev-request">
          <day>28</day>
          <month>11</month>
          <year>2021</year>
        </date>
        <date date-type="rev-recd">
          <day>7</day>
          <month>1</month>
          <year>2022</year>
        </date>
        <date date-type="accepted">
          <day>6</day>
          <month>3</month>
          <year>2022</year>
        </date>
      </history>
      <copyright-statement>©Daniel Gooch, Vikram Mehta, Avelie Stuart, Dmitri Katz, Mohamed Bennasar, Mark Levine, Arosha Bandara, Bashar Nuseibeh, Amel Bennaceur, Blaine Price. Originally published in JMIR Human Factors (https://humanfactors.jmir.org), 27.04.2022.</copyright-statement>
      <copyright-year>2022</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Human Factors, is properly cited. The complete bibliographic information, a link to the original publication on https://humanfactors.jmir.org, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://humanfactors.jmir.org/2022/2/e34606" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>The global population is aging, leading to shifts in health care needs. In addition to developing technology to support physical health, there is an increasing recognition of the need to consider how technology can support emotional health. This raises the question of how to design devices that older adults can interact with to log their emotions.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>We designed and developed 2 novel tangible devices, inspired by existing paper-based scales of emotions. The findings from a field trial of these devices with older adults are reported.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>Using interviews, field deployment, and fixed logging tasks, we assessed the developed devices.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>Our results demonstrate that the tangible devices provided data comparable with standardized psychological scales of emotion. The participants developed their own patterns of use around the devices, and their experience of using the devices uncovered a variety of design considerations. We discuss the difficulty of customizing devices for specific user needs while logging data comparable to psychological scales of emotion. We also highlight the value of reflecting on sparse emotional data.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>Our work demonstrates the potential for tangible emotional logging devices. It also supports further research on whether such devices can support the emotional health of older adults by encouraging reflection of their emotional state.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>older adults</kwd>
        <kwd>health</kwd>
        <kwd>emotion</kwd>
        <kwd>affect</kwd>
        <kwd>well-being</kwd>
        <kwd>tangible interaction</kwd>
        <kwd>TUI</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <sec>
        <title>Background Context</title>
        <p>The United Nations predicts that the global population aged 60 years and older will increase from 962 million in 2017 to 2.1 billion in 2050 and 3.1 billion in 2100, making this the fastest growing age group [<xref ref-type="bibr" rid="ref1">1</xref>]. These demographic changes will significantly impact how we think about supporting the health and well-being of the population. Older people can face long-term disabilities and chronic conditions as well as mental health difficulties [<xref ref-type="bibr" rid="ref2">2</xref>]. For example, Age UK has noted that the number of over-50s experiencing loneliness is set to reach 2 million by 2025/6. This compares to around 1.4 million in 2016/7—a 49% increase in 10 years. For the purposes of this work, the term “older adults” is used to refer to anyone over the age of 50 years based on the recommendations of Age UK (the main charity working with older adults in the United Kingdom).</p>
        <p>This increase in the older population will drive an increase in the need for carers and the costs of health care [<xref ref-type="bibr" rid="ref3">3</xref>]. This has led to significant amounts of research into how to enable people to age in place; “the desire and tendency of older persons to stay in their current dwelling units for as long as possible” [<xref ref-type="bibr" rid="ref4">4</xref>]. Compared to other forms of care, aging in place is more cost-effective and preferred by many older adults [<xref ref-type="bibr" rid="ref5">5</xref>]. This is because it can enhance many quality of life factors (eg, identity, autonomy, belonging, privacy, independence, social connections) [<xref ref-type="bibr" rid="ref6">6</xref>,<xref ref-type="bibr" rid="ref7">7</xref>].</p>
        <p>There have been promising developments in the design of technology to support the physical health of an aging population [<xref ref-type="bibr" rid="ref8">8</xref>-<xref ref-type="bibr" rid="ref10">10</xref>]. However, there is increasing recognition of the link between well-being and “successfully” aging, which makes it important to improve the psychological well-being of older adults [<xref ref-type="bibr" rid="ref11">11</xref>]. This necessitates mechanisms for the detection or logging of the older adult’s emotional state to either ensure that the older adult is happy or provide appropriate support when in emotional turmoil [<xref ref-type="bibr" rid="ref12">12</xref>-<xref ref-type="bibr" rid="ref14">14</xref>].</p>
        <p>Although a wide variety of digital technologies have been developed for the monitoring of emotions [<xref ref-type="bibr" rid="ref15">15</xref>-<xref ref-type="bibr" rid="ref23">23</xref>], there is little work that explores such interfaces specifically for older adults [<xref ref-type="bibr" rid="ref24">24</xref>]. In a review of apps for successful aging, no apps for monitoring emotions were identified [<xref ref-type="bibr" rid="ref25">25</xref>]. Given that older adults have distinct cognitive, physical, and technical skills, alongside distinct emotional needs, it is necessary to consider the design of a system for recording the emotional state of older adults at home [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref26">26</xref>].</p>
        <p>Many researchers argue that tangible user interfaces (TUIs) are ideal for use in domestic settings by older adults owing to both their acceptability in domestic settings and the comparatively quick learning curve [<xref ref-type="bibr" rid="ref27">27</xref>-<xref ref-type="bibr" rid="ref29">29</xref>]. TUIs allow the user to provide input to a digital system by manipulating physical objects (eg, moving them around or stretching and squeezing them). Similarly, output from the TUI interaction could be shown to the user through the manipulation of a physical object. TUIs have also been found to increase engagement with logging emotions, suggesting that this form factor could promote ongoing use [<xref ref-type="bibr" rid="ref17">17</xref>]. A broad review of the TUI literature for supporting social interactions among older adults highlights that most papers conclude that TUIs are highly usable for older adults [<xref ref-type="bibr" rid="ref30">30</xref>].</p>
        <p>In previous laboratory-based work, we have demonstrated that nonfunctional prototypes of tangible devices allow older adults to log emotions and collect data comparable to validated psychological scales of emotion [<xref ref-type="bibr" rid="ref31">31</xref>]. We build on this work by developing 2 of these nonfunctional prototype designs into tangible devices that can digitally record the logged emotions. Our field study with adults aged 51-85 years demonstrates the validity of logged data against existing scales of emotion, showing that tangible devices can provide data comparable to standard psychological scales in a home setting. We explored our participants’ experience of using the devices over a 6-week period. This provided an understanding of how users can appropriate the use of the devices as well as how key design characteristics are viewed. Our results highlight the potential of in-home tangible devices for recording the emotions of older adults and for supporting their emotional health through encouraging reflection of their emotional state.</p>
      </sec>
      <sec>
        <title>Background Literature</title>
        <p>By exploring previous approaches to logging emotion, we can identify key design properties that should be embedded in the design of tangible devices for logging emotions. Through exploring the literature on self-report scales of emotion, interfaces for logging emotion, and TUIs for logging emotion, we identify key design decisions and reflect on them when outlining the development of our TUI devices in the section “Designing tangible devices for logging emotions.”</p>
        <p>It is important from the outset to distinguish between emotion and mood. Although both refer to phenomenological states, they differ in 2 key dimensions [<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref33">33</xref>]. The first is time; emotions tend to be short-lived, whereas moods are more enduring. The second difference is that emotions are object-driven (ie, they relate to a specific object or experience), while moods are more general. The concepts are related; a person’s mood biases the emotions they experience and a person’s emotions contribute to the mood they are in. Throughout this paper, the term “mood” is used only when it is the term used by other researchers in their work. The terms “emotion” and “affect” are used interchangeably as is common practice [<xref ref-type="bibr" rid="ref33">33</xref>].</p>
        <p>Across all fields interested in emotional experience, there are 3 main approaches to detecting and measuring how people feel: physiological, behavioral cues, and self-report. This research is focused on self-reported measures of emotion. Although self-report measures have shortcomings, they provide the user with a level of control over the disclosure of their emotional state. This is important for older adults in having an active role in their health care needs [<xref ref-type="bibr" rid="ref34">34</xref>,<xref ref-type="bibr" rid="ref35">35</xref>]. Self-reporting emotions also has other benefits. From a well-being perspective, there is a rich literature on the benefits to an individual of emotional reflection and recording, which is commonly used as a therapeutic technique [<xref ref-type="bibr" rid="ref36">36</xref>]. Studies are starting to show how technologically-mediated reflection and recording can improve well-being [<xref ref-type="bibr" rid="ref37">37</xref>] and promote behavior change [<xref ref-type="bibr" rid="ref38">38</xref>]. From a methodological perspective, a recent review of ecological momentary assessment of mood highlights the importance of self-reporting due to ecological validity and agency [<xref ref-type="bibr" rid="ref39">39</xref>].</p>
      </sec>
      <sec>
        <title>Self-report Scales of Emotion</title>
        <p>There are many different measures and scales focused on emotion in the psychology literature. Desmet et al [<xref ref-type="bibr" rid="ref15">15</xref>] provide an excellent review of this literature. These measures predominantly coalesce around 2 concepts: valence (pleasure) and arousal (strength of feeling). Dominance is a third concept that is also sometimes used [<xref ref-type="bibr" rid="ref40">40</xref>]. Proponents argue that these 3 dimensions can account for significant variances in people’s emotional experiences and collectively correspond to affect.</p>
        <p>Russell’s 2D approach to conceptualizing emotion is one of the most popular measures of emotion [<xref ref-type="bibr" rid="ref41">41</xref>,<xref ref-type="bibr" rid="ref42">42</xref>]. He models emotion as a spatial distribution across 2 scales (valence and arousal) (see <xref rid="figure1" ref-type="fig">Figure 1</xref>). This approach argues that a spatial model provides a conceptual structure for related emotive concepts in such a way that allows the self-reporting of emotions [<xref ref-type="bibr" rid="ref41">41</xref>]. A related approach uses emotive words to distinguish between related emotive states. One of the first commonly used robust measures that took this approach was the Semantic Differential Scale, consisting of a set of 18 bipolar adjective pairs [<xref ref-type="bibr" rid="ref43">43</xref>]. Each pair is then rated along a 9-point scale. Although heavily used, the measure is extremely cumbersome to use, requiring 18 different measurement ratings for each stimulus. It also relies on an individual’s English reading skills.</p>
        <fig id="figure1" position="float">
          <label>Figure 1</label>
          <caption>
            <p>A schematic for the 2D structure of emotion from [<xref ref-type="bibr" rid="ref41">41</xref>]. The valence scale runs left-to-right and the arousal scale runs top-to-bottom.</p>
          </caption>
          <graphic xlink:href="humanfactors_v9i2e34606_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <p>A variety of pictorial scales have also been developed. The Self-Assessment Manikin (SAM) is a classic example, made up of 3 pictorial scales: for affect (or valence), the pictures range “from a smiling, happy figure to a frowning, unhappy figure;” for arousal, the pictures range “from an excited, wide-eyed figure to a relaxed, sleepy figure;” and for dominance, the change is in the size of the figure, “a large figure indicates maximum control in the situation” [<xref ref-type="bibr" rid="ref40">40</xref>]. Although SAM is straightforward to conceptualize, it is somewhat complicated to administer, particularly in terms of explaining the dominance dimension. Some have argued that the only intuitive scale is valence (positive vs negative facial expression) [<xref ref-type="bibr" rid="ref15">15</xref>].</p>
        <p>Alternative pictorial scales have aimed for even greater simplicity. For example, the “smileyometer” was developed as a single Likert-scale style set of emotive faces [<xref ref-type="bibr" rid="ref44">44</xref>], while Desmet et al [<xref ref-type="bibr" rid="ref15">15</xref>] generated 8 cartoon figures to represent key emotions. A questionnaire-based study with 191 participants suggests that their scale can provide robust and reliable assessments of individuals’ emotions.</p>
        <p>All of these scales were designed to be completed on paper. Given that we are designing an interactive technology for the collection of emotional data, we now explore the literature on interfaces for collecting self-reported emotions.</p>
      </sec>
      <sec>
        <title>Interfaces for Logging Emotion</title>
        <p>“A wide range of digital symptom monitoring tools exist, but there is a lack of evidence regarding their effectiveness in a health care context, particularly in the area of mental health” [<xref ref-type="bibr" rid="ref45">45</xref>]. Much of the evidence that exists focuses on apps for people with mental health disorders (such as bipolar disorder) [<xref ref-type="bibr" rid="ref46">46</xref>-<xref ref-type="bibr" rid="ref51">51</xref>]. The findings from these studies highlight which design properties have led to beneficial results and thereby provide insights into the design properties to utilize in the development of tangible alternatives.</p>
        <p>An 8-week randomized trial of a suite of 13 mental health apps by Zhang et al [<xref ref-type="bibr" rid="ref49">49</xref>] identified 3 distinct user behaviors: learning, goal setting, and self-tracking. Most importantly for our interests, participants who engaged in self-tracking experienced reduced depression symptoms. This is significant, as it suggests that logging affect can lead to improved emotional well-being. Zhang et al [<xref ref-type="bibr" rid="ref49">49</xref>] also found that “greater amounts of engagement did not necessarily lead to greater reductions in depression.” This is an important design principle as it highlights that the device does not necessarily need to repeatedly harass users to enter data; as long as they engage with the system, they will receive some form of benefit.</p>
        <p>True Colours is a digital tool for monitoring mood disorders. Used by over 36,000 individuals, it has formed part of 21 unique research and clinical service settings in the United Kingdom [<xref ref-type="bibr" rid="ref45">45</xref>]. In addition to providing additional evidence of the efficacy of the digital logging of affect, the authors also note that the technology provides many advantages over hard copy symptom monitoring diaries, including the ability to prompt for input and the ability to easily visualize changes over time [<xref ref-type="bibr" rid="ref45">45</xref>].</p>
        <p>Chandrashekar [<xref ref-type="bibr" rid="ref50">50</xref>] has reviewed meta-studies of the use of apps for people with depression, anxiety, and schizophrenia. In addition to demonstrating that these apps can have clinical benefits for these conditions, they also established some characteristics of high-efficacy apps. Among other features, providing a simple user interface and minimal usage reminders were highlighted as helping provide benefits to users.</p>
        <p>Beyond these apps developed to help people with mental health disorders, there are a variety of interfaces that draw on self-report constructs of emotion to support the logging of emotion based on pictorial scales [<xref ref-type="bibr" rid="ref15">15</xref>] or Russell’s circumplex model [<xref ref-type="bibr" rid="ref41">41</xref>,<xref ref-type="bibr" rid="ref42">42</xref>]. None of these studies involved older adults, and the study focus was on exploring the developed design rather than the efficacy for users.</p>
        <p>Fernández et al [<xref ref-type="bibr" rid="ref52">52</xref>] developed a digital diary, specifically designed for older users. Users were encouraged to complete predefined questions about self-care and emotions answered on a tablet device. Fernández et al [<xref ref-type="bibr" rid="ref52">52</xref>] focus on the usability elements of their design and field-tested the system with 10 participants aged over 60 years, who used the device for 5 days. Nine of the participants agreed that they would like to continue using the tool, and data collected from the study suggested that the simple act of logging was sufficient to prompt users to reflect about their day and how they were feeling.</p>
        <p>Although the use of these interfaces has identified certain design properties as significant, they are not tangible devices. We now explore the sparse literature on TUIs for logging emotion to identify design properties specific to this interaction paradigm.</p>
      </sec>
      <sec>
        <title>TUI for Logging Emotion</title>
        <p>A small number of tangible interfaces have been developed to log emotions. The EmoBall [<xref ref-type="bibr" rid="ref53">53</xref>] used an LED matrix grid to display “faces” with positive (smiling) or negative (frowning) expressions. When the ball is pressed, the display shows a face depicting a different emotion; when the ball is pressed twice, the displayed emotion is logged and the ball vibrates. While evaluated through focus groups with 16 people, the study investigated the usability of EmoBall for people with “low digital competences” rather than its efficacy as a mood logging device.</p>
        <p>In a different context, the subtle stone was developed to allow students to privately share their affect with their teacher within a classroom setting [<xref ref-type="bibr" rid="ref54">54</xref>]. A ribbed rubber ball, the subtle stone contained 6 LEDs, which could display 7 separate colors. Each student could develop their own color/emotion mapping, and an emotion is selected by repeatedly squeezing the ball until the color is shown. This was field-trialed with 15 UK school students (aged 12-13 years) throughout 9 hours of German language lessons, with students reporting that the device “supported reflection on emotional experience by giving them a way of thinking about their emotions.”</p>
        <p>The Mood TUI was developed to make mood collection fun and engaging [<xref ref-type="bibr" rid="ref17">17</xref>]. Designed as a cube with a different emoticon on each face, users select a mood by rotating the cube until the desired emoticon is facing upwards. Evaluated through discussion sessions with 32 participants, Sarzotti [<xref ref-type="bibr" rid="ref17">17</xref>] concludes that there was interest in the design concept.</p>
        <p>Jingar and Lindgren [<xref ref-type="bibr" rid="ref55">55</xref>] took a design-oriented approach, co-designing TUIs to support the emotional health of older adults. Their interest was in how emotions could be communicated to a digital agent through tangible interactions. The variety of prototypes developed highlights the scope of the design space and the potential of TUIs to support older adults. Analyzing the data from their workshop, Jingar and Lindgren [<xref ref-type="bibr" rid="ref55">55</xref>] argue that the nature of TUIs means that they may be “intuitive and natural to use, and intrinsic motivation may be promoted” [<xref ref-type="bibr" rid="ref55">55</xref>].</p>
        <p>Our previous work has highlighted the value of TUIs, particularly for those older adults who have arthritis or other musculoskeletal difficulties. Arthritis is a common condition, particularly in later life [<xref ref-type="bibr" rid="ref56">56</xref>], and musculoskeletal difficulties can limit an individual’s ability to control a graphical user interface [<xref ref-type="bibr" rid="ref57">57</xref>]. This makes tangible devices extremely suitable for use by older adults.</p>
      </sec>
      <sec>
        <title>Research Objectives</title>
        <p>Although there is substantial literature on developing apps, interfaces, scales, and measures for logging emotion, few are explicitly designed for older adults ([<xref ref-type="bibr" rid="ref15">15</xref>-<xref ref-type="bibr" rid="ref23">23</xref>] focus primarily on younger adults). We are specifically interested in designing tools to support older adults to log emotions; therefore, we draw on this work for inspiration. Given that research highlights the potential benefits of designing TUIs for older adults, we specifically focus on designing and developing novel tangible devices. Taking inspiration from existing paper-based scales of emotions, we explore what design properties are valued by older adults in the context of monitoring their emotional state. From the literature in the background section (see <xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>) [<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref20">20</xref>,<xref ref-type="bibr" rid="ref31">31</xref>,<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref49">49</xref>-<xref ref-type="bibr" rid="ref55">55</xref>], the key design considerations that appear to have a significant impact on participants’ use of the devices were to (1) minimize prompting, (2) ensure a clear mapping between the TUI interaction and the mood to be logged, (3) minimize fine grain movement, and (4) ensure that devices had a high-quality finish, suitable for use in a home location.</p>
      </sec>
      <sec>
        <title>Designing Tangible Devices for Logging Emotions</title>
        <p>We build on our earlier work on mood logging [<xref ref-type="bibr" rid="ref31">31</xref>] to explore (1) whether digital TUIs can log emotional data comparable to validated psychological scales of emotion and (2) whether such devices would engage older adult participants and what their view of particular design characteristics were after using the devices in a home context. Thus, our first design decision was to focus on TUIs and convert the validated nonfunctional prototype designs into digital devices.</p>
        <sec>
          <title>Key Design Decisions</title>
          <p>Stepping back from the intricacies of particular device designs, it is necessary to discuss one of the underlying psychological practices that supports the efficacy of logging data: reflection. Reflection is a key part of all logging behavior. Manual data collection can support the process of reflection in action [<xref ref-type="bibr" rid="ref58">58</xref>]. In the context of logging emotion, it is well-established that taking the time to consider your emotional state has benefits in itself, particularly in terms of someone deciding to change behavior based on their reflection [<xref ref-type="bibr" rid="ref34">34</xref>,<xref ref-type="bibr" rid="ref35">35</xref>,<xref ref-type="bibr" rid="ref59">59</xref>-<xref ref-type="bibr" rid="ref62">62</xref>].</p>
          <p>Our second design decision was to provide the device users with no access to their recorded data during typical use. Users would only be shown their collected data at the end of the field deployment and if they asked to see it (to promote the transparency of the research). This stands in contrast to many self-logging devices but allows us to explore any benefits of engagement with the data creation process, without confounding it with the benefits of reflecting on the historical data.</p>
          <p>Our third design decision was to require minimal interaction [<xref ref-type="bibr" rid="ref63">63</xref>,<xref ref-type="bibr" rid="ref64">64</xref>], a design property that can help reduce the potential high burden of manual tracking. Given the perceived time burden of manual tracking [<xref ref-type="bibr" rid="ref60">60</xref>], leading to high attrition rates [<xref ref-type="bibr" rid="ref65">65</xref>], by minimizing the users’ interaction with the device, the potential time burden is also minimized.</p>
        </sec>
        <sec>
          <title>Selecting the Emotion Scale</title>
          <p>The background section highlighted the wide range of available emotion scales. Our previous exploration of nonfunctional prototypes using 3 distinct scales indicated that 2 of the scales should be developed further into digital devices. The prototype based on the emotive words from Russell’s circumplex were liked by users, given the simplicity of interaction and the speed of use. The prototype using the circumplex itself was liked by users, as it supported a more free-flowing process of reflection about their emotional state [<xref ref-type="bibr" rid="ref31">31</xref>]. We decided to use these 2 scales of emotion.</p>
          <p>Note that because these 2 scales represent the same conceptualization, analyzing the accuracy of logged data becomes easier. <xref rid="figure2" ref-type="fig">Figure 2</xref> shows how the 2 scales can be considered to be somewhat equivalent. Taking the emotion of “excited” as an example, the blue-highlighted octant represents it in the circumplex, whereas in the word-based scale it is represented by the word “excited.”</p>
          <fig id="figure2" position="float">
            <label>Figure 2</label>
            <caption>
              <p>A representation of how the circumplex of affect [<xref ref-type="bibr" rid="ref40">40</xref>] and the emotive words from [<xref ref-type="bibr" rid="ref40">40</xref>] are both representations of the same scale.</p>
            </caption>
            <graphic xlink:href="humanfactors_v9i2e34606_fig2.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
          <p>We chose to focus on developing devices that can record 8 emotions: happy, excited, nervous, annoyed, sad, bored, calm, and relaxed. These 8 emotions provide wide coverage over the range of potential emotions and are a commonly used subset of representative emotions [<xref ref-type="bibr" rid="ref15">15</xref>].</p>
        </sec>
      </sec>
      <sec>
        <title>Designing the Devices</title>
        <p>Our previous work [<xref ref-type="bibr" rid="ref31">31</xref>] focused on the development of nonfunctional prototypes of TUIs, which fulfilled the need to require minimal interactions [<xref ref-type="bibr" rid="ref50">50</xref>,<xref ref-type="bibr" rid="ref63">63</xref>,<xref ref-type="bibr" rid="ref64">64</xref>]. As we have previously reported the design and development of these prototypes, here, we focus on the physical and electrical design of translating the nonfunctional paper prototypes into working digital TUIs. The resulting designs were named the Emotion Clock and the Emotion Board. These devices were developed by drawing on the design characteristics highlighted through the papers in the background section, in constant conversation with experts at Age UK to ensure that the resulting designs would be appropriate for use by older adults.</p>
        <sec>
          <title>Emotion Clock</title>
          <p>The Emotion Clock arranges 8 emotive words around a clockface in accordance with Russell’s valence/arousal circumplex [<xref ref-type="bibr" rid="ref41">41</xref>,<xref ref-type="bibr" rid="ref42">42</xref>] (see <xref rid="figure3" ref-type="fig">Figure 3</xref>). A user selects an emotion by rotating the clock hand to the word describing the emotion they want to convey. The words are engraved into a wooden clock face, with the electronics hidden in a recess behind the clock face. The Emotion Clock has a diameter of 26 cm. Users were not instructed on how to use the hand. Although the clock allows users to record on a continuous scale, leaving the hand between 2 words, for the purposes of analysis, the nearest word to the hand position is recorded.</p>
          <fig id="figure3" position="float">
            <label>Figure 3</label>
            <caption>
              <p>The Emotion Clock, using a subset of the emotive words in [<xref ref-type="bibr" rid="ref40">40</xref>]. The emotion is set to Happy.</p>
            </caption>
            <graphic xlink:href="humanfactors_v9i2e34606_fig3.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
        </sec>
        <sec>
          <title>Emotion Board</title>
          <p>The Emotion Board is a tangible representation of Russell’s axes [<xref ref-type="bibr" rid="ref41">41</xref>,<xref ref-type="bibr" rid="ref42">42</xref>], using the color scheme from Rivera-Pelayo et al [<xref ref-type="bibr" rid="ref20">20</xref>] (see <xref rid="figure4" ref-type="fig">Figure 4</xref>). The axes are labelled High Energy to Low Energy (top to bottom) and Feeling Bad to Feeling Good (left to right). A user moves a magnet around to select a position on the axes and thus represent an emotive state. Framed in wood, there are 2 versions of the electronics behind the Emotion Board. The first version uses a custom piece of eTextiles, which is segmented to represent 16 sections of the axes (a high-arousal and low-arousal area for each of the 8 emotions). The second version uses an array of reed switches to achieve the same result but at a significantly lower cost. The board is approximately 26 cm × 26 cm.</p>
          <fig id="figure4" position="float">
            <label>Figure 4</label>
            <caption>
              <p>The Emotion Board, based on the Russell axes in [<xref ref-type="bibr" rid="ref40">40</xref>] using the color scheme from [<xref ref-type="bibr" rid="ref20">20</xref>]. The emotion is set to Calm.</p>
            </caption>
            <graphic xlink:href="humanfactors_v9i2e34606_fig4.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
        </sec>
        <sec>
          <title>Logging Behavior</title>
          <p>The 2 devices adopted the same approach for how the underlying electronics capture the logged emotion. Both devices were controlled by a custom circuit board that could receive the logged mood from the device and transmit the log to a Raspberry Pi over Wi-Fi using the Message Queuing Telemetry Transport protocol. The Raspberry Pi was connected to the participants’ router and could then send the log to our server using HTTPS.</p>
          <p>The devices are powered using AA batteries rather than mains power. This allowed users to place the devices wherever they would like in their homes and improved the aesthetics of the devices by removing trailing wires. New batteries are sufficient to power the devices for at least 3 weeks. During the field trial, none of the participants’ devices ran out of power. One implication of this decision is that the electronics must be low powered so that users do not have to repeatedly replace the batteries. As such, the electronics are programmed to capture the recorded data in a targeted way. Each log is recorded on a central server rather than locally on the device. This allowed us to monitor whether a deployed device was working. It also meant that we could keep an accurate record of the logged data without having to worry about the device being damaged and losing locally stored data.</p>
          <p>Both devices “woke up” every 5 minutes to check the position of the clock hand or magnet. If the position had not changed (indicating no new emotion input), the device went back to sleep and nothing was recorded. If the position had changed, the device sent the new emotion to our servers over Wi-Fi and recorded it locally (replacing the previously recorded emotion). The device would try to send the data to the servers up to 10 times before returning to sleep; if it had not successfully sent the emotion, it would attempt to send the locally recorded emotion the next time it woke up—this would continue until the batteries ran out.</p>
          <p>Following advice from the literature, the devices prompt users to log their emotions regularly but infrequently to ensure sufficient reflection without placing an undue burden on users [<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref49">49</xref>,<xref ref-type="bibr" rid="ref50">50</xref>]. The devices beeped at noon and 6 PM for 5 seconds irrespective of how many inputs were given by the user for that date. The devices did not beep at any time outside this window. To encourage at least 1 logging action per day, between noon and 6 PM, the device beeped on every hour until a mood was logged. In designing this protocol, the disruption of users was minimized while prompting them to think about their emotions.</p>
        </sec>
      </sec>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Ethics Approval</title>
        <p>Our study was designed in accordance with our University’s code of ethics and approved by the Open University Human Research Ethics Committee (HREC/3343/Gooch).</p>
      </sec>
      <sec>
        <title>Device</title>
        <p>Each of the devices was piloted and was found to induce no discomfort. Participants had the right to refuse to use either of the devices, and it was possible for participants to immediately end their use of a device if they experienced any discomfort. None of the participants opted to do so. We had 2 key concerns in exploring the value of the developed devices. The first is whether participants could accurately record their emotional state through the prototype. The second concern was to explore how our participants used the devices and their view on the design characteristic embodied within the devices.</p>
      </sec>
      <sec>
        <title>Procedure</title>
        <p>A field-trial approach was used to evaluate the devices over a period of 6 weeks. This involved each participant taking part in a prestudy session, a midstudy session, and an exit-study session. Each of these sessions took place at a participant’s home and were audio recorded. The sessions lasted between 25 and 54 minutes (mean 28 minutes). Each session was one-to-one between a researcher and participant. Each participant used both devices for 3 weeks. The ordering of which device was used first was counterbalanced between participants as much as possible, although more Emotion Clocks had been manufactured, meaning the majority of participants (n=7) used this device first. The semistructured interview script can be found in <xref ref-type="supplementary-material" rid="app2">Multimedia Appendix 2</xref>. The procedure was as follows.</p>
        <sec>
          <title>Prestudy Session</title>
          <p>Sessions began by the researcher explaining that the purpose of the study was to explore new ways of logging emotion and highlighting that no personal emotional experiences would be logged. Informed consent was then collected. Subsequently, this session comprised the following activities: (1) initial data collection, (2) device orientation, (3) emotion logging calibration, and (4) device setup. Each of these activities is described below.</p>
          <list list-type="order">
            <list-item>
              <p>Initial data collection: Some basic demographic information was collected from the participant, as well as conducting a short interview regarding any existing logging behaviors (such as keeping a diary), their use of logging technology (such as a Fitbit), and what prompted the participant to take part in the trial.</p>
            </list-item>
            <list-item>
              <p>Device orientation: Participants were given a brief explanation of one of the devices (counterbalanced between participants) and how they represent the 2 dimensions of emotion. The researcher answered any questions the participant had regarding the device.</p>
            </list-item>
            <list-item>
              <p>Emotion logging calibration: The main element of the prestudy session was to gather data as to whether participants could log emotions using the selected device with the same accuracy as with the standardized paper-based scales. To ensure coverage across different emotional states, standardized emotive vignettes were used. The Affective Norms for English Text (ANET) vignettes are linked to known SAM scores, giving us a known emotion associated with each vignette [<xref ref-type="bibr" rid="ref66">66</xref>] (referred to as the expected vignette emotion). These texts have previously been used in studies of emotional interfaces [<xref ref-type="bibr" rid="ref16">16</xref>], as well as with our previous nonfunctional prototypes [<xref ref-type="bibr" rid="ref31">31</xref>]. For each of the 8 emotions (happy, calm, nervous, excited, sad, relaxed, bored, and annoyed), a short vignette with SAM scores corresponding to that emotion was selected. A condition of using the ANET vignettes is to keep them confidential; so, we are unable to republish them. To illustrate the tone of the vignettes, these 2 examples were written by the first author: (1) “You receive a letter informing you that you have won a holiday to the Caribbean in the quiz you entered last week” (excited) (2) “You discover that your best friend has been diagnosed with a serious illness” (sad). Participants were provided with the vignettes in a randomized order. Having read the text, participants were asked which emotion was portrayed by the vignette. This description is referred to as the participant description. For all of the vignettes, all of the participants provided a synonym of one of the 8 emotions (eg, thrilled becomes excited). The participant description allows us to test that the emotion logged by a participant through the prototype matches the emotion the participant wanted to log. Participants were then asked to record the emotion from the vignette through the prototype. 
The researcher recorded the result for the prototype alongside the time taken by the participant to record the emotion. Completing this exercise prior to setting the device up means that the logged emotions do not include this initial test.</p>
            </list-item>
            <list-item>
              <p>Device setup: The prestudy session ended with the researcher setting the device up within the participants’ home for them to log their emotions for 3 weeks. Participants were instructed that they could place the device wherever they wanted within the home. In terms of use, participants were told that “the device will prompt you to input your emotions twice a day. You can provide more inputs if you wish to.”</p>
            </list-item>
          </list>
          <p>At the end of the session, participants were provided with contact details and informed that they could contact us at any time if they were experiencing problems or wanted to talk about the study. We could remotely monitor whether the devices were working correctly by checking the server holding the logged emotions.</p>
        </sec>
        <sec>
          <title>Midstudy Session</title>
          <p>The focus of the midstudy session was to swap over the 2 devices at 3 weeks after the prestudy session. The session started with an audio-recorded wrap-up interview for the device the participant had been using for 3 weeks. The interview covered aspects such as exploring whether the participant had noticed an impact on how they felt, what their general thoughts about the device were, and specific questions regarding the prompting, the aesthetics, the difficulty of interaction, and whether they would hypothetically be willing to share the emotion data they had recorded. Having completed the interview, the researcher swapped over the devices and then repeated the prestudy session with the participant for the second device.</p>
        </sec>
        <sec>
          <title>Exit-Study Session</title>
          <p>Three weeks after the midstudy session, the exit-study session concluded the study and compared the experience of using the 2 devices. The session started with a wrap-up interview for the device the participant had been using for 3 weeks, following the same procedure as for the midstudy session. The session concluded by asking participants to complete a short interview, which was audio recorded. Participants were asked about their general thoughts about the idea of recording their emotions, how hard they found each prototype to use, how hard each prototype was to understand, and their opinions about having a similar device in their home. Further questions explored whether participants continued to be interested in logging how they felt; comparing the 2 devices in terms of use, aesthetics, and how hard they found each prototype to use; and any changes the participant could suggest for improving either of the devices. The study ended with a short debrief, during which time participants were thanked. Participants were shown graphs of their mood data for full disclosure of the collected data. Participants were provided with a £30 (US $39) honorarium for taking part in the study.</p>
        </sec>
      </sec>
      <sec>
        <title>Analysis</title>
        <p>In analyzing the data from the study, we had 2 main questions. The first relates to the accuracy of the prototypes: could participants log the emotion they want to log through the prototype devices? The second was to explore our participants’ use of the devices and consider their response to the design characteristics embodied by the devices.</p>
        <sec>
          <title>Accuracy of the Prototypes</title>
          <p>The data from each of the prototypes can be analyzed categorically and ordinally, as outlined previously [<xref ref-type="bibr" rid="ref31">31</xref>]. As categorical data, there is “ground truth” for each vignette because each vignette is taken from a validated set of emotive texts. Therefore, the emotion the vignette should be provoking in our participants is known (the <italic>expected vignette emotion</italic>). We also have the <italic>participant description</italic>, the emotion the participant believes each vignette expresses. To determine whether the prototypes allow participants to log the emotion they wanted to record, Cohen kappa is used to compare the emotion recorded through the prototype against (1) the <italic>expected vignette emotion</italic> and (2) the <italic>participant description</italic>. Cohen kappa ranges from no agreement (κ=0) to complete agreement (κ=1) [<xref ref-type="bibr" rid="ref67">67</xref>].</p>
          <p>A problem with treating the data as categorical is that it removes any connection between the different emotions. For example, if a participant records “happy” instead of “excited,” that is a closer match than if they record “sad.” An alternative way of conceptualizing the data is as 2 ordinal scales. Each of the prototypes uses a scale based on Russell’s circumplex of affect (see <xref rid="figure2" ref-type="fig">Figure 2</xref>); therefore, each emotion can be represented as a pair of figures ranging from –2 to +2 for both valence and arousal (see <xref rid="figure5" ref-type="fig">Figure 5</xref>).</p>
          <fig id="figure5" position="float">
            <label>Figure 5</label>
            <caption>
              <p>A representation of how the emotions can be given ordinal values on the circumplex of affect.</p>
            </caption>
            <graphic xlink:href="humanfactors_v9i2e34606_fig5.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
          <p>As an indicator of similarity, it is possible to calculate the Euclidean distance by calculating the distance between 2 matrices (the expected emotional values and the actual emotional values), with each matrix being formed of the valence and arousal values. The distance reflects the size of dissimilarity between the expected emotions and the recorded emotions; the more dissimilar, the greater the distance between them. The Euclidean distance between 2 observations is the length of the line between them. The equation in <xref rid="figure6" ref-type="fig">Figure 6</xref> is used to calculate the distance across all samples. In both the categorical Cohen kappa and the ordinal Euclidean distance, we are not interested in the statistical performance <italic>per se</italic>. Instead, we are looking for confirmation that the prototypes allowed participants to log the emotion they wished to record.</p>
          <fig id="figure6" position="float">
            <label>Figure 6</label>
            <caption>
              <p>The equation for calculating Euclidean distance.</p>
            </caption>
            <graphic xlink:href="humanfactors_v9i2e34606_fig6.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
        </sec>
        <sec>
          <title>Analyzing Participants’ Views</title>
          <p>The interviews were audio recorded and transcribed. An inductive open coding approach was used to identify concepts and themes within the interview transcripts [<xref ref-type="bibr" rid="ref68">68</xref>]. The transcripts were subjected to a line-by-line analysis by the first author, who had not interviewed any of the participants. Through this initial analysis, concepts were identified and labelled within the data. No codes existed prior to the analysis; they were created through constant comparison of the data and the application of labels to the text.</p>
          <p>These codes were subsequently categorized into unifying themes by the first author. These themes were then discussed in conjunction with the 3 authors who had interviewed the participants, to ensure that the developed themes corresponded with their interpretation of the data, informed by the interviews they had participated in.</p>
        </sec>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <sec>
        <title>Recruitment</title>
        <p>Eleven participants were recruited to take part in the study. The study was interrupted by the COVID-19 pandemic. This meant participant #9 could not fully complete the study, as it was not possible to switch the devices over and they only used the Emotion Clock. A further 2 participants (participant #10 and participant #11) could not start the study owing to difficulties of setting the devices up within their homes. Two other participants were somewhat impacted by the pandemic, with participant #5 and participant #8 using their second device, as the United Kingdom went into lockdown. It is unknown whether this had an impact on their logging behavior. We have full data from 8 participants, and partial data from participant #9.</p>
        <p>Participants had to be aged over 50 years, be fluent in English, and to have no significant cognitive impairments. Participants’ ages ranged from 51 to 85 years (mean 69 [SD 11.9] years). Seven of our 9 participants were females. All 9 participants had English as their first language. None of the participants reported a history of mental health concerns. Participants were recruited through Age UK Exeter (participant #6, participant #7, participant #9) or personal contact with the authors (participant #1-5, participant #8) through word of mouth or previous participation in other studies. None of the participants had disruptive physical difficulties or cognitive impairments. <xref ref-type="table" rid="table1">Table 1</xref> shows the demographics of our participants.</p>
        <p>We found no differences in our analysis between those participants who received the Emotion Clock first and those who received the Emotion Board first.</p>
        <table-wrap position="float" id="table1">
          <label>Table 1</label>
          <caption>
            <p>Demographics of our participants.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="250"/>
            <col width="170"/>
            <col width="240"/>
            <col width="340"/>
            <thead>
              <tr valign="top">
                <td>Participants</td>
                <td>Age (years)</td>
                <td>Gender</td>
                <td>First device</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>Participant #1</td>
                <td>69</td>
                <td>Female</td>
                <td>Emotion Clock</td>
              </tr>
              <tr valign="top">
                <td>Participant #2</td>
                <td>74</td>
                <td>Female</td>
                <td>Emotion Board</td>
              </tr>
              <tr valign="top">
                <td>Participant #3</td>
                <td>69</td>
                <td>Female</td>
                <td>Emotion Clock</td>
              </tr>
              <tr valign="top">
                <td>Participant #4</td>
                <td>51</td>
                <td>Male</td>
                <td>Emotion Clock</td>
              </tr>
              <tr valign="top">
                <td>Participant #5</td>
                <td>54</td>
                <td>Female</td>
                <td>Emotion Clock</td>
              </tr>
              <tr valign="top">
                <td>Participant #6</td>
                <td>85</td>
                <td>Female</td>
                <td>Emotion Clock</td>
              </tr>
              <tr valign="top">
                <td>Participant #7</td>
                <td>60</td>
                <td>Male</td>
                <td>Emotion Board</td>
              </tr>
              <tr valign="top">
                <td>Participant #8</td>
                <td>79</td>
                <td>Female</td>
                <td>Emotion Clock</td>
              </tr>
              <tr valign="top">
                <td>Participant #9</td>
                <td>80</td>
                <td>Female</td>
                <td>Emotion Clock</td>
              </tr>
            </tbody>
          </table>
        </table-wrap>
      </sec>
      <sec>
        <title>Accuracy of the Logged Emotions</title>
        <p>Using standard ANET vignettes provides baseline data of the emotion associated with the vignette, while the <italic>participant description</italic> states what emotion the participant wanted to record. Both can then be compared against the emotions recorded through the 2 prototypes.</p>
        <p>The first stage of this comparison is to examine the results as categorical data. <xref ref-type="table" rid="table2">Table 2</xref> presents the results from calculating Cohen kappa for each prototype, comparing the emotion recorded in the prototype against (1) the expected result based on the ANET vignette scores and (2) the participant-described emotions. The results show at least moderate agreement (all kappa values&#62;0.5 at <italic>P</italic>&#60;.001) [<xref ref-type="bibr" rid="ref69">69</xref>], with the Emotion Clock demonstrating strong agreement.</p>
        <table-wrap position="float" id="table2">
          <label>Table 2</label>
          <caption>
            <p>Cohen kappa values for each prototype.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="230"/>
            <col width="370"/>
            <col width="400"/>
            <thead>
              <tr valign="top">
                <td>Prototype</td>
                <td>Expected vignette emotion</td>
                <td>Participant description emotion</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>Emotion Clock</td>
                <td>0.79</td>
                <td>0.91</td>
              </tr>
              <tr valign="top">
                <td>Emotion Board</td>
                <td>0.5</td>
                <td>0.5</td>
              </tr>
            </tbody>
          </table>
        </table-wrap>
        <p>Examining the results as ordinal data, we calculated the Euclidean distance between the valence/arousal values collected through the prototypes and the expected valence/arousal from the vignettes. The Euclidean distance between the values collected through the prototypes and the participant’s description of the vignette was also calculated. <xref ref-type="table" rid="table3">Table 3</xref> shows the Euclidean distances for each of the prototypes. To interpret these figures, it is important to note that there are 64 data points (8 vignettes from 8 participants) on 2 scales running from –2 to +2.</p>
        <table-wrap position="float" id="table3">
          <label>Table 3</label>
          <caption>
            <p>The Euclidean distance for the valence and arousal data recorded through each interface compared against the expected data from the vignette and the participant description.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="170"/>
            <col width="350"/>
            <col width="480"/>
            <thead>
              <tr valign="top">
                <td>Prototype</td>
                <td>Vignette total distance</td>
                <td>Participant description total distance</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>Emotion Clock</td>
                <td>21.65</td>
                <td>18.35</td>
              </tr>
              <tr valign="top">
                <td>Emotion Board</td>
                <td>58.45</td>
                <td>21.40</td>
              </tr>
            </tbody>
          </table>
        </table-wrap>
        <p>To contextualize the data, we also calculated what the Euclidean distance would be if, for a given interface, all participants were 1 emotion out (see <xref rid="figure5" ref-type="fig">Figure 5</xref>, eg, the expected emotion was “excited” and the participant records “happy”). Such a scenario provides a Euclidean distance of 90.51. We also calculated what the Euclidean distance would be if, for a given interface, all participants provided the opposite emotion (eg, the expected emotion was “happy” and the participant records “sad”). Such a scenario provides a Euclidean distance of 286.22. Compared against these contextual calculations, our results in <xref ref-type="table" rid="table3">Table 3</xref> show strong-to-moderate agreement between the expected emotion and the recorded emotion. This suggests that the disagreements between expected emotions and recorded emotions noted by the Cohen kappa results were not large discrepancies (eg, logging “happy” instead of “sad”) but small (eg, logging “excited” instead of “happy”).</p>
        <p>Consistent with the kappa results, these results show a clear difference in the accuracy of the prototype responses with the emotions logged through the Emotion Clock being the closest to both the vignette and <italic>participant description</italic> values.</p>
      </sec>
      <sec>
        <title>Participant Use of the Devices</title>
        <sec>
          <title>Usage Behaviors</title>
          <p>Having established the accuracy of the devices, we considered the ways in which our participants used the prototypes. Our 9 participants recorded 1085 emotions across the 42-day study (see <xref ref-type="table" rid="table4">Table 4</xref>). The graph in <xref rid="figure7" ref-type="fig">Figure 7</xref> shows the number of emotions recorded by each participant by study week. This shows some indication of novelty effects (with a high peak for most participants in week 1 and then, a general decline), but the number of emotions recorded is relatively consistent over time.</p>
          <fig id="figure7" position="float">
            <label>Figure 7</label>
            <caption>
              <p>A graph showing the number of logs made by each participant by study week. P: participant.</p>
            </caption>
            <graphic xlink:href="humanfactors_v9i2e34606_fig7.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
          <table-wrap position="float" id="table4">
            <label>Table 4</label>
            <caption>
              <p>Number of emotions logged through the prototypes.</p>
            </caption>
            <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
              <col width="330"/>
              <col width="330"/>
              <col width="340"/>
              <thead>
                <tr valign="top">
                  <td>Participants</td>
                  <td>Emotion Clock (n=579)</td>
                  <td>Emotion Board (n=506)</td>
                </tr>
              </thead>
              <tbody>
                <tr valign="top">
                  <td>Participant #1 (n=220)</td>
                  <td>134</td>
                  <td>86</td>
                </tr>
                <tr valign="top">
                  <td>Participant #2 (n=105)</td>
                  <td>43</td>
                  <td>62</td>
                </tr>
                <tr valign="top">
                  <td>Participant #3 (n=91)</td>
                  <td>31</td>
                  <td>60</td>
                </tr>
                <tr valign="top">
                  <td>Participant #4 (n=148)</td>
                  <td>63</td>
                  <td>85</td>
                </tr>
                <tr valign="top">
                  <td>Participant #5 (n=183)</td>
                  <td>62</td>
                  <td>121</td>
                </tr>
                <tr valign="top">
                  <td>Participant #6 (n=40)</td>
                  <td>24</td>
                  <td>16</td>
                </tr>
                <tr valign="top">
                  <td>Participant #7 (n=97)</td>
                  <td>93</td>
                  <td>4</td>
                </tr>
                <tr valign="top">
                  <td>Participant #8 (n=148)</td>
                  <td>76</td>
                  <td>72</td>
                </tr>
                <tr valign="top">
                  <td>Participant #9 (n=53)</td>
                  <td>53</td>
                  <td>N/A<sup>a</sup></td>
                </tr>
              </tbody>
            </table>
            <table-wrap-foot>
              <fn id="table4fn1">
                <p><sup>a</sup>Not applicable.</p>
              </fn>
            </table-wrap-foot>
          </table-wrap>
          <p>In examining the emotions that were logged, there are 3 main groupings, with neutral emotions being logged most frequently (relaxed [n=318], calm [n=276], and bored [n=72]), followed by positive emotions (happy [n=272] and excited [n=76]), with more negative emotions logged rarely (sad [n=31], nervous [n=26], and annoyed [n=14]).</p>
          <p>Seven of our participants developed a routine as to when they logged emotions through the devices. Each of these routines was somewhat similar, with all of these participants regularly logging in the morning and evenings, with additional logs throughout the day if seeing the device prompted them to think about logging:</p>
          <disp-quote>
            <p>…I have very low energy in the morning. So I usually changed it in the morning. I’d look at it usually, certainly, in the evening as well because at that point I’d be feeling more energetic and lively. During the day, I think, really if... Most of the time, I’m feeling fairly calm and cheerful.</p>
            <attrib>Participant #1</attrib>
          </disp-quote>
          <p>The remaining 2 participants had less of a routine around logging, relying on the prompts from the device or seeing the device as a reminder about logging:</p>
          <disp-quote>
            <p>…I don’t think there was any specific time. It was when I suddenly thought, “Oh, I haven’t done that yet,” or I’d been out and I think, “I must do that when I get back.”</p>
            <attrib>Participant #2</attrib>
          </disp-quote>
          <p>All but one of the participants placed the devices in the living room, perhaps the most public area in the home. This decision appeared to be driven by the convenience of using the device in the room most used and where the device would act as a reminder.</p>
          <p>For the 8 participants who placed the devices in the living room, there was no concern about their last logged emotion being publicly visible, with their visitors generally being people they would freely discuss their emotions with (friends, family, etc). Instead, the devices acted as a talking point about the purpose of our study, which often led to a discussion of self-reflection:</p>
          <disp-quote>
            <p>…I found people were interested in it and often noticed it when they visited, and were interested in the whole idea. I had some friends round, there was quite a long conversation about mood and how you recognize mood. It was a talking point quite a lot of times... when you talked about it, they could recognize that it could be actually quite a clever way of getting you to recognize your mood and to understand how your mood changed.</p>
            <attrib>Participant #1</attrib>
          </disp-quote>
          <p>The participant who did not publicly display the devices placed them within their study—a room they spend large amounts of time in (and were thus prompted by seeing the device), without advertising their emotions to visitors.</p>
        </sec>
        <sec>
          <title>Perceived Need to Record Emotion</title>
          <p>Five of our participants saw value in the devices as tools to monitor their own emotions, use that monitoring as a prompt for self-reflection and, if necessary, make changes to improve their emotional state:</p>
          <disp-quote>
            <p>…it’s a good idea, because it makes you think about your mood, so therefore, you have to think before you select. So where, normally, I wouldn’t bother-I’d just rush through the day.</p>
            <attrib>Participant #5</attrib>
          </disp-quote>
          <p>Eight of our participants also saw the monitoring as potentially a useful mechanism for sharing their feelings over time with other people. This was predominantly in the context of well-being and identifying whether family or friends needed to undertake some action as the person monitoring had seen a persistent or severe change in emotion. Of these 8 participants, 5 would have been happy to share their emotions with loved ones:</p>
          <disp-quote>
            <p>…I think I would be more open to indicating than saying probably. That might be just a man thing but it’s you know I mean I just feel that I have to be happy and positive all the time.</p>
            <attrib>Participant #7</attrib>
          </disp-quote>
          <p>The remaining 3 participants indicated that they would be more comfortable with sharing with clinicians (eg, their doctor), would not be comfortable with sharing at all, or could see the value in sharing but did not feel they were at that life stage yet (which did not correspond with participant age). This led us to consider whether participants who were less willing to share had a different profile of logged emotions (eg, whether they had a greater percentage of negative emotions). Comparing the participants’ willingness to share their logged emotions with the emotions that participants had logged through the devices did not establish a clear pattern, with willingness to share more likely related to an individual’s feelings of privacy.</p>
        </sec>
        <sec>
          <title>Use of the Devices</title>
          <p>Having noted that most participants identified a perceived need for the devices, it is necessary to consider what evidence there is that the devices had value to our participants. Five of our participants found that both of the devices helped them reflect on their emotions, with another 2 participants reporting this was only the case for the Emotion Clock and the Emotion Board. The ability to regularly log an emotion was a sufficient prompt to provide a scaffold for all of these participants to reflect on their emotional state:</p>
          <disp-quote>
            <p>…I think I thought about my moods quite a bit more, how I was really feeling, you know... Using it has had a positive impact, yes, because I’ve had to really think about how I feel.</p>
            <attrib>Participant #2</attrib>
          </disp-quote>
          <p>This was particularly the case during significant occasions. For participant #1 over their birthday and for participant #2 when their dog died, they found that the devices were particularly helpful in encouraging them to reflect on how they were feeling.</p>
          <p>Most of our participants would like to continue using the devices. When explicitly asked whether they would like to continue monitoring their emotions using our devices, 5 of the participants saw clear value in them and would like to continue using them. None of these participants expressed a preference for only continuing with one of the devices. The remaining 4 participants did not wish to continue using the devices, mainly as they did not perceive any derived benefit from their use. This included the 3 participants who did not consider themselves at a life stage of needing such a device; therefore, their disinterest was not a matter of dislike but rather of a current lack of perceived need for emotional well-being management.</p>
        </sec>
        <sec>
          <title>Device Preferences</title>
          <p>Although the devices share certain design characteristics, the nature of interaction is significantly different. The clock offers a quick, immediate, and limited choice, while the board offers a more open-ended exploratory wide-ranging selection. It is worth examining how our participants engaged with these distinct designs and what can be learnt from those engagements.</p>
          <p>For the Emotion Clock, 7 participants praised the simplicity of the design, stating:</p>
          <disp-quote>
            <p>“it was easy enough to use.”</p>
            <attrib>Participant #9</attrib>
          </disp-quote>
          <p>These participants went on to discuss how the specificity of the emotions listed was not necessarily the emotions they wanted to record:</p>
          <disp-quote>
            <p>…I am, actually, a very busy person, which is why I say you should have that on there. If you’re busy, you’re not necessarily relaxed or calm. (Laughter) You’re just busy. Obviously, ‘lonely’ is not on here.</p>
            <attrib>Participant #3</attrib>
          </disp-quote>
          <p>This raises a question of the value of customizability, but in personalizing the words available for participants to select, the link between the device and the underlying psychological scales is removed. In contrast, only 2 participants felt that the Emotion Board (participant #3 and participant #4) was easy to use. Four participants felt that the Emotion Board was relatively difficult to understand, with the open nature of the interaction causing confusion:</p>
          <disp-quote>
            <p>…I sometimes found it a bit difficult to quite understand the square. I tended to move the thing round the edges of the square, I wasn’t sure how the middle works and whether that calibrated things differently into the center.</p>
            <attrib>Participant #1</attrib>
          </disp-quote>
          <p>For some participants, this meant that they did not feel comfortable exploring the range of options through the Emotion Board, thereby reducing the use of the device as they did not understand the continuum nature of the design. However, 5 participants felt that while the Emotion Board was harder to understand, the necessary thought could help provoke further engagement and reflection:</p>
          <disp-quote>
            <p>…I had to think about that more... I certainly had to think about it more than with the [Emotion Clock], because it was whether you were feeling up, down, you know, energized, not energized.</p>
            <attrib>Participant #3</attrib>
          </disp-quote>
          <p>Participant #4 also noted that they related more to associating feelings with colors than they did with words, making the Emotion Board much more meaningful for them.</p>
          <p>When our participants were asked which of the devices they preferred, the Emotion Clock was the most popular choice, with 6 of the participants preferring the simplicity of the interaction and the visual design. The other 2 participants, participant #4 and participant #8, preferred the open-ended interaction of the Emotion Board.</p>
        </sec>
      </sec>
      <sec>
        <title>Design Characteristics</title>
        <p>Having explored the specific design qualities of the individual devices, it is worth considering the design characteristics the devices shared and how they influenced our participants. The 2 devices shared certain design characteristics, particularly a shared aesthetics and a shared prompting system.</p>
        <p>Six of our participants discussed the aesthetics of the devices without being prompted. All 6 were positive about the designs, noting that constructing the devices from wood made the devices pleasant to look at and made them blend in to the home environment. This is important as the aesthetics of the devices are likely an important factor as to whether people are likely to use the devices for long-term use; we would argue that if people are pleased by having the device in the house, they are much more likely to engage with the emotion logging in the long term.</p>
        <p>As reported earlier, only 2 of our participants relied on the prompts for logging emotions, with the other 7 participants developing their own routine. All of the participants noted that the audio prompting was not annoying and not distracting. Participant #1 noted that on occasion, the prompt could be useful as an occasional reminder, while participant #2 suggested increasing the frequency to 4 times a day as a more regular prompt. In general, though, our moderate prompting appears to have been appropriate.</p>
      </sec>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Value of the Devices</title>
        <p>The focus of this work has been in evaluating the value of our tangible emotion logging devices for older adults. Our results demonstrate that our tangible devices can record data comparable to psychological scales of emotion. Such a finding validates the use of TUIs in this context and demonstrates that such devices could hold value for older adults. Furthermore, the level of use of the devices from our participants indicates that the participants saw some value in using the devices. The devices hold certain design properties that supported this use, particularly reflection on sparse data, provision of no data history, and focus on minimal interactions.</p>
        <p>These properties are not unique in research into reflective logging technology. The value of reflecting on sparse data with minimal history is attracting increasing attention [<xref ref-type="bibr" rid="ref70">70</xref>,<xref ref-type="bibr" rid="ref71">71</xref>]. Further, focusing on minimal interaction is seen as a way for users to log meaningful data without becoming overburdened by the effort of logging [<xref ref-type="bibr" rid="ref63">63</xref>,<xref ref-type="bibr" rid="ref64">64</xref>,<xref ref-type="bibr" rid="ref72">72</xref>]. We have built on this work and demonstrated that these design qualities in a different context—tangible devices for older adults—can support meaningful emotional reflection. Our findings open the design space for further consideration of how tangible devices can support emotional logging and reflection.</p>
        <p>More specifically, our work also contributes to 2 ongoing interrelated debates within the field: the role of reflection in designs such as ours and the value of customizability in logging devices.</p>
      </sec>
      <sec>
        <title>The Role of Reflection</title>
        <p>Along with much of the human-computer interaction field, we have been somewhat imprecise in our treatment of reflection in our work, providing no firm definition or placing it within a theoretical framework [<xref ref-type="bibr" rid="ref73">73</xref>]. To a certain extent, this was deliberate—our interest has been more on the design and success of the device rather than the mechanism through which users gained value. Although we operate under the assumption that the act of logging an emotional state would prompt users to think about their emotions and more broadly, their well-being in a form of reflection-in-action [<xref ref-type="bibr" rid="ref58">58</xref>], we have not attempted to demonstrate that this mechanism is how our users gained value from the devices.</p>
        <p>One of the key debates over supporting reflection through interaction design is the process by which reflection occurs. The model from Li et al [<xref ref-type="bibr" rid="ref74">74</xref>] argues that reflection only happens at 1 stage of the reflection life-cycle, after preparation, collection, and integration, with the reflection leading to an action. This contrasts with the model from Epstein et al [<xref ref-type="bibr" rid="ref61">61</xref>], which is more cyclical, with reflection taking place during an activity as well as afterwards.</p>
        <p>Our research supports work that has demonstrated that people can reflect on relatively sparse data [<xref ref-type="bibr" rid="ref75">75</xref>]. Our results suggest that a simple interaction, with no recorded history, is sufficient to support some users in reflecting on their emotional state. This is much closer to the Epstein et al’s [<xref ref-type="bibr" rid="ref61">61</xref>] model of reflection. None of our participants requested to see their recorded data at any point during the study, further suggesting that focusing on the design of the logging experience rather than on the historical record could be more beneficial to users.</p>
        <p>One of the aims of personal informatics is to support behavior change and self-improvement by helping people become more self-aware. Some researchers have proposed that to do this effectively, we should not be constrained by supporting the consideration of past events but provide recommendations for future actions [<xref ref-type="bibr" rid="ref76">76</xref>]. Such systems involve a combination of different subsystems. These include interfaces and device development, the design of analysis algorithms, and a complex sociotechnical mechanism for supporting the recommended actions.</p>
        <p>Instead of attempting to construct all of the elements of such a system, we have focused on a single element (the interface design and device development), with results indicating that well-designed interfaces can be sufficient for some people to derive value from them. It remains an open question for the field as to whether such results can be enhanced by connecting such an interface to a well-designed and validated sociotechnical system for supporting deeper reflective actions. Given the complexity of the necessary “ongoing negotiation of the boundaries and meanings of self within an anxious alliance of knowledge, bodies, devices, and data” that is necessary for effective long-term use of logging technologies [<xref ref-type="bibr" rid="ref77">77</xref>], we have provided a starting point for exploring the value of tangibles in this alliance.</p>
      </sec>
      <sec>
        <title>The Value of Customizability</title>
        <p>Some participants noted that they would have liked to have been able to customize the devices so that they were logging emotions more linked to their day-to-day experiences. Although this is perfectly feasible from a design perspective, it does remove the link between the device design and the underlying validated psychological scales being used. Our focus on ensuring the devices are linked to the validated psychological scales comes from the broader context of this work, where the research team is part of a project investigating home-based health monitoring technology. Working with clinicians, there was a focus on ensuring that if the data were later to be shared with clinicians or other stakeholders, it would be possible to understand the data in the context of an established framework.</p>
        <p>This dichotomy is representative of a long-standing concern within the personal informatics community, with some researchers exploring better ways of aggregating and analyzing precise quantifiable data [<xref ref-type="bibr" rid="ref78">78</xref>,<xref ref-type="bibr" rid="ref79">79</xref>] and others arguing for a switch from a focus on “behavior and its objective data to the self and its subjective meanings” [<xref ref-type="bibr" rid="ref71">71</xref>].</p>
        <p>An alternative approach would be to design around affect labelling. This regulation technique can be described as asking people to put their feelings into words [<xref ref-type="bibr" rid="ref80">80</xref>], which can help people regulate their emotions [<xref ref-type="bibr" rid="ref81">81</xref>]. This could prove an interesting route of customization for 2 reasons. First, it would be aggregating the labels in a meaningful way so that the historical record is useful to both the person logging and any related need (eg, with a clinician or carer). If the labels were restricted to a wide (but standardized) set such as Plutchik’s Wheel of Emotions [<xref ref-type="bibr" rid="ref82">82</xref>] or the Geneva Emotion Wheel [<xref ref-type="bibr" rid="ref83">83</xref>], this aggregation could still take place automatically. Second, given the value of affect labelling comes from its open-ended nature, this is a design challenge in translating such a technique into a tangible logging tool.</p>
      </sec>
      <sec>
        <title>Limitations and Further Work</title>
        <p>We are working in an imprecise area of human experience. This means our findings and conclusions must be tempered by known limitations as discussed below.</p>
        <p>Our first limitation stems from the design decisions we made. First, the Emotion Board makes strong use of color. Color is an inappropriate prompt for people with color blindness, and we have not accounted for the cultural implications inherent in color. Second, our devices do not cover fleeting emotions, as discussed by 2 of our participants. Third, by focusing on tangible technology suitable for the home, the resulting design was not suitable for logging emotions in outside contexts, as noted by 3 participants. Although we acknowledge these limitations as properties of our designs, they also indicate promising directions for further work.</p>
        <p>The study methodology has a limitation in that we are unable to report the extent to which the participants’ accuracy of interpreting the emotion expressed in the ANET vignettes was influenced by their personal ability to understand other people’s emotions or their personal emotional reactions to the stimuli. We decided against screening participants based on their ability to interpret emotions from the vignettes and compensated for this by asking for the <italic>participant descriptions</italic>.</p>
        <p>Additionally, we have no mechanism for comparing the data that participants logged during the field trial and how those participants were actually feeling. Although none of the participants raised this as an issue during the interviews, we cannot be completely certain as to whether participants tended to underlog or overlog particular types of emotions. Methodologically, this remains a challenge.</p>
        <p>More broadly, our participant pool is relatively small and further work is needed to explore the generalizability of our results. The size of our study was directly limited by the COVID-19 pandemic, with one study cut short (participant #9) and 2 recruited participants unable to take part (participant #10, participant #11). Given that we were unable to safely distribute the tangible artefacts to a particularly COVID-vulnerable population, we were unable to extend the number of participants within the study. Furthermore, 6 of our participants were recruited through contact with the authors through word of mouth or previous participation in other studies. Although we have no personal relationship with these participants, they are more likely to be engaged in this kind of research and more technically able than the population as a whole. This convenience sampling also led to a gender imbalance among our participants. While this limits the strength of the evidence, we are not arguing that our results are replicable across the population at large, but we argue that our work provides promising results and indicates further research directions.</p>
      </sec>
      <sec>
        <title>Conclusion and Future Work</title>
        <p>In this paper, we have contributed one of the first empirical investigations into the suitability of using tangible devices based on standardized scales of emotion for older adults to log emotions. We conclude that our devices are sufficiently accurate in collecting emotional data from older adults. Additionally, our work demonstrates the potential for using tangible devices to assist older adults in logging their emotional state to support reflection and emotional well-being. We argue that there is a significant amount of future work needed to extend this work by exploring whether this value holds when using tangibility as a design property of more self-expressive logging technology for older adults. Given the sharp divide between the competing interests of generalizability and customizability, it is clear that designers have to establish what is more important to their user base. They should also ensure that their users have alternative options if their preferences change over time.</p>
        <p>We argue that this success highlights the suitability for tangible devices to be used for long-term logging within the home. This study provides foundational support for tangible emotion self-logging devices for older adults and justifies further large-scale field studies exploring the effects of each device type on long-term engagement. In future work, we plan on exploring 2 interrelated aspects: (1) whether tangibility can be developed as a design quality for more self-expressive logging technologies and (2) exploring how to develop resilient sociotechnical support that responds to the data being logged by older adults. In doing so, we will better understand how tangible devices can help older adults wanting to maintain and improve their long-term well-being.</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group>
      <supplementary-material id="app1">
        <label>Multimedia Appendix 1</label>
        <p>Emotional logging summary tables.</p>
        <media xlink:href="humanfactors_v9i2e34606_app1.pdf" xlink:title="PDF File  (Adobe PDF File), 77 KB"/>
      </supplementary-material>
      <supplementary-material id="app2">
        <label>Multimedia Appendix 2</label>
        <p>Interview questions.</p>
        <media xlink:href="humanfactors_v9i2e34606_app2.pdf" xlink:title="PDF File  (Adobe PDF File), 121 KB"/>
      </supplementary-material>
    </app-group>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">ANET</term>
          <def>
            <p>Affective Norms for English Text</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">SAM</term>
          <def>
            <p>self-assessment manikin</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">TUI</term>
          <def>
            <p>tangible user interface</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>This work was completed under support from the UK Engineering and Physical Sciences Research Council grant EP/P01013X/1 and EP/V027263/1 as well as Science Foundation Ireland grant 13/RC/2094.</p>
    </ack>
    <fn-group>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="web">
          <article-title>World population prospects: the 2017 revision</article-title>
          <source>The United Nations</source>
          <access-date>2022-04-11</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.un.org/development/desa/pd/sites/www.un.org.development.desa.pd/files/files/documents/2020/Jan/un_2017_world_population_prospects-2017_revision_databooklet.pdf">https://www.un.org/development/desa/pd/sites/www.un.org.development.desa.pd/files/files/documents/2020/Jan/un_2017_world_population_prospects-2017_revision_databooklet.pdf</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="web">
          <article-title>Mental health of older adults</article-title>
          <source>World Health Organization</source>
          <year>2017</year>
          <access-date>2022-04-11</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.who.int/news-room/fact-sheets/detail/mental-health-of-older-adults">https://www.who.int/news-room/fact-sheets/detail/mental-health-of-older-adults</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="web">
          <article-title>Future of an ageing population</article-title>
          <source>UK Government Office for Science</source>
          <access-date>2022-04-11</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.gov.uk/government/publications/future-of-an-ageing-population">https://www.gov.uk/government/publications/future-of-an-ageing-population</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pynoos</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Nishita</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Kendig</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <person-group person-group-type="editor">
            <name name-style="western">
              <surname>Birren</surname>
              <given-names>JE</given-names>
            </name>
          </person-group>
          <article-title>Housing</article-title>
          <source>Encyclopedia of Gerontology (Second Edition)</source>
          <year>2017</year>
          <publisher-loc>New York</publisher-loc>
          <publisher-name>Elsevier</publisher-name>
          <fpage>709</fpage>
          <lpage>719</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mynatt</surname>
              <given-names>ED</given-names>
            </name>
            <name name-style="western">
              <surname>Essa</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Rogers</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Increasing the opportunities for aging in place</article-title>
          <year>2000</year>
          <conf-name>Proceedings on the 2000 conference on Universal Usability (CUU '00)</conf-name>
          <conf-date>November 16-17, 2000</conf-date>
          <conf-loc>Arlington, Virginia, USA</conf-loc>
          <pub-id pub-id-type="doi">10.1145/355460.355475</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kendig</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Clemson</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Mackenzie</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <person-group person-group-type="editor">
            <name name-style="western">
              <surname>Smith</surname>
              <given-names>SJ</given-names>
            </name>
          </person-group>
          <article-title>Older people: well-being, housing and neighbourhoods</article-title>
          <source>International Encyclopedia of Housing and Home</source>
          <year>2012</year>
          <publisher-loc>Amsterdam</publisher-loc>
          <publisher-name>Elsevier</publisher-name>
          <fpage>150</fpage>
          <lpage>155</lpage>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wiles</surname>
              <given-names>JL</given-names>
            </name>
            <name name-style="western">
              <surname>Leibing</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Guberman</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Reeve</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Allen</surname>
              <given-names>RES</given-names>
            </name>
          </person-group>
          <article-title>The meaning of "aging in place" to older people</article-title>
          <source>Gerontologist</source>
          <year>2012</year>
          <month>06</month>
          <volume>52</volume>
          <issue>3</issue>
          <fpage>357</fpage>
          <lpage>66</lpage>
          <pub-id pub-id-type="doi">10.1093/geront/gnr098</pub-id>
          <pub-id pub-id-type="medline">21983126</pub-id>
          <pub-id pub-id-type="pii">gnr098</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kientz</surname>
              <given-names>JA</given-names>
            </name>
            <name name-style="western">
              <surname>Patel</surname>
              <given-names>SN</given-names>
            </name>
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Price</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Mynatt</surname>
              <given-names>ED</given-names>
            </name>
            <name name-style="western">
              <surname>Abowd</surname>
              <given-names>GD</given-names>
            </name>
          </person-group>
          <article-title>The Georgia Tech aware home</article-title>
          <year>2008</year>
          <conf-name>CHI '08 Extended Abstracts on Human Factors in Computing Systems</conf-name>
          <conf-date>April 5-8, 2008</conf-date>
          <conf-loc>Montréal</conf-loc>
          <fpage>3675</fpage>
          <lpage>3680</lpage>
          <pub-id pub-id-type="doi">10.1145/1358628.1358911</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Amiribesheli</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Benmansour</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Bouchachia</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>A review of smart homes in healthcare</article-title>
          <source>J Ambient Intell Human Comput</source>
          <year>2015</year>
          <month>3</month>
          <day>14</day>
          <volume>6</volume>
          <issue>4</issue>
          <fpage>495</fpage>
          <lpage>517</lpage>
          <pub-id pub-id-type="doi">10.1007/s12652-015-0270-2</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Adib</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Mao</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Kabelac</surname>
              <given-names>Z</given-names>
            </name>
            <name name-style="western">
              <surname>Katabi</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Miller</surname>
              <given-names>RC</given-names>
            </name>
          </person-group>
          <article-title>Smart homes that monitor breathing and heart rate</article-title>
          <year>2015</year>
          <conf-name>Proceedings of the 33rd Annual ACM Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 18-23, 2015</conf-date>
          <conf-loc>Seoul</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2702123.2702200</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="web">
          <article-title>A compendium of fact sheets: Well-being across the life course</article-title>
          <source>UK Department of Health</source>
          <year>2014</year>
          <access-date>2022-04-11</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.gov.uk/government/publications/wellbeing-and-health-policy">https://www.gov.uk/government/publications/wellbeing-and-health-policy</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Massa</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Mazzali</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Zampini</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Zancanaro</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <person-group person-group-type="editor">
            <name name-style="western">
              <surname>Casiddu</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Porfirione</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Monteriù</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Cavallo</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <article-title>Quantify yourself: are older adults ready</article-title>
          <source>Ambient Assisted Living</source>
          <year>2019</year>
          <publisher-loc>Berlin/Heidelberg, Germany</publisher-loc>
          <publisher-name>Springer</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Doyle</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Walsh</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Sassu</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>McDonagh</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Designing a wellness self-management tool for older adults: results from a field trial of YourWellness</article-title>
          <year>2014</year>
          <conf-name>Proceedings of the 8th International Conference on Pervasive Computing Technologies for Healthcare</conf-name>
          <conf-date>May 20-23, 2014</conf-date>
          <conf-loc>Brussels</conf-loc>
          <pub-id pub-id-type="doi">10.4108/icst.pervasivehealth.2014.254950</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Seiderer</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Hammer</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Andre</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Mayr</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Rist</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Exploring digital image frames for lifestyle intervention to improve well-being of older adults</article-title>
          <year>2015</year>
          <conf-name>Proceedings of the 5th International Conference on Digital Health 2015</conf-name>
          <conf-date>May 18, 2015</conf-date>
          <conf-loc>Florence, Italy</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2750511.2750514</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Desmet</surname>
              <given-names>PM</given-names>
            </name>
            <name name-style="western">
              <surname>Vastenburg</surname>
              <given-names>MH</given-names>
            </name>
            <name name-style="western">
              <surname>Romero</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Mood measurement with Pick-A-Mood: review of current methods and design of a pictorial self-report scale</article-title>
          <source>JDR</source>
          <year>2016</year>
          <volume>14</volume>
          <issue>3</issue>
          <fpage>241</fpage>
          <pub-id pub-id-type="doi">10.1504/jdr.2016.079751</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Broekens</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Brinkman</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>AffectButton: A method for reliable and valid affective self-report</article-title>
          <source>International Journal of Human-Computer Studies</source>
          <year>2013</year>
          <month>6</month>
          <volume>71</volume>
          <issue>6</issue>
          <fpage>641</fpage>
          <lpage>667</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1016/j.ijhcs.2013.02.003"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.ijhcs.2013.02.003</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sarzotti</surname>
              <given-names>F</given-names>
            </name>
          </person-group>
          <article-title>Self-Monitoring of Emotions and Mood Using a Tangible Approach</article-title>
          <source>Computers</source>
          <year>2018</year>
          <month>01</month>
          <day>08</day>
          <volume>7</volume>
          <issue>1</issue>
          <fpage>7</fpage>
          <pub-id pub-id-type="doi">10.3390/computers7010007</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Huisman</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>van Hout</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>van Dijk</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>van der Geest</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Heylen</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>LEMtool: measuring emotions in visual interfaces</article-title>
          <year>2013</year>
          <conf-name>Proceedings of the SIGCHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 27-May 3, 2013</conf-date>
          <conf-loc>Paris</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2470654.2470706</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Hong</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Designing for self-tracking of emotion and experience with tangible modality</article-title>
          <year>2017</year>
          <conf-name>Proceedings of the 2017 Conference on Designing Interactive Systems</conf-name>
          <conf-date>June 10-14, 2017</conf-date>
          <conf-loc>Edinburgh</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3064663.3064697</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Rivera-Pelayo</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Fessl</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Müller</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Pammer</surname>
              <given-names>V</given-names>
            </name>
          </person-group>
          <article-title>Introducing Mood Self-Tracking at Work</article-title>
          <source>ACM Trans Comput-Hum Interact</source>
          <year>2017</year>
          <month>03</month>
          <day>22</day>
          <volume>24</volume>
          <issue>1</issue>
          <fpage>1</fpage>
          <lpage>28</lpage>
          <pub-id pub-id-type="doi">10.1145/3014058</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Huang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Tang</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>Y</given-names>
            </name>
          </person-group>
          <article-title>Emotion map: a location-based mobile social system for improving emotion awareness and regulation</article-title>
          <year>2015</year>
          <conf-name>Proceedings of the 18th ACM Conference on Computer Supported Cooperative Work &#38; Social Computing</conf-name>
          <conf-date>March 14-18, 2015</conf-date>
          <conf-loc>Vancouver</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2675133.2675173</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hollis</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Konrad</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Springer</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Antoun</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Antoun</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Martin</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Whittaker</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>What Does All This Data Mean for My Future Mood? Actionable Analytics and Targeted Reflection for Emotional Well-Being</article-title>
          <source>Human–Computer Interaction</source>
          <year>2017</year>
          <month>03</month>
          <day>16</day>
          <volume>32</volume>
          <issue>5-6</issue>
          <fpage>208</fpage>
          <lpage>267</lpage>
          <pub-id pub-id-type="doi">10.1080/07370024.2016.1277724</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Fuentes</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Herskovic</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Rodríguez</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Gerea</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Marques</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Rossel</surname>
              <given-names>PO</given-names>
            </name>
          </person-group>
          <article-title>A systematic literature review about technologies for self-reporting emotional information</article-title>
          <source>J Ambient Intell Human Comput</source>
          <year>2016</year>
          <month>11</month>
          <day>11</day>
          <volume>8</volume>
          <issue>4</issue>
          <fpage>593</fpage>
          <lpage>606</lpage>
          <pub-id pub-id-type="doi">10.1007/s12652-016-0430-z</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Doyle</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>O’Mullane</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>McGee</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Knapp</surname>
              <given-names>RB</given-names>
            </name>
          </person-group>
          <article-title>YourWellness: designing an application to support positive emotional well-being in older adults</article-title>
          <year>2012</year>
          <conf-name>The 26th BCS Conference on Human Computer Interaction</conf-name>
          <conf-date>September 12-14, 2012</conf-date>
          <conf-loc>Birmingham</conf-loc>
          <pub-id pub-id-type="doi">10.14236/ewic/hci2012.28</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Dasgupta</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Chaudhry</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Koh</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Chawla</surname>
              <given-names>NV</given-names>
            </name>
          </person-group>
          <article-title>A Survey of Tablet Applications for Promoting Successful Aging in Older Adults</article-title>
          <source>IEEE Access</source>
          <year>2016</year>
          <volume>4</volume>
          <fpage>9005</fpage>
          <lpage>9017</lpage>
          <pub-id pub-id-type="doi">10.1109/access.2016.2632818</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Davidson</surname>
              <given-names>JL</given-names>
            </name>
            <name name-style="western">
              <surname>Jensen</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>What health topics older adults want to track: a participatory design study</article-title>
          <year>2013</year>
          <conf-name>Proceedings of the 15th International ACM SIGACCESS Conference on Computers and Accessibility</conf-name>
          <conf-date>October 21-23, 2013</conf-date>
          <conf-loc>Bellevue, Washington</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2513383.2513451</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ishii</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Tangible bits: beyond pixels</article-title>
          <year>2008</year>
          <conf-name>Proceedings of the 2nd international conference on Tangible and embedded interaction</conf-name>
          <conf-date>February 18-20, 2008</conf-date>
          <conf-loc>Bonn, Germany</conf-loc>
          <pub-id pub-id-type="doi">10.1145/1347390.1347392</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Spreicer</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Tangible interfaces as a chance for higher technology acceptance by the elderly</article-title>
          <year>2011</year>
          <conf-name>Proceedings of the 12th International Conference on Computer Systems and Technologies</conf-name>
          <conf-date>June 16-17, 2011</conf-date>
          <conf-loc>Vienna, Austria</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2023607.2023660</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Joshi</surname>
              <given-names>SG</given-names>
            </name>
            <name name-style="western">
              <surname>Bråthen</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Lowering the threshold: reconnecting elderly users with assistive technology through tangible interfaces</article-title>
          <year>2016</year>
          <conf-name>International Conference on Human Aspects of IT for the Aged Population</conf-name>
          <conf-date>July 17-22, 2016</conf-date>
          <conf-loc>Toronto</conf-loc>
          <pub-id pub-id-type="doi">10.1007/978-3-319-39943-0_6</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bong</surname>
              <given-names>WK</given-names>
            </name>
            <name name-style="western">
              <surname>Chen</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Bergland</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Mandl</surname>
              <given-names>T</given-names>
            </name>
          </person-group>
          <article-title>Tangible User Interface for Social Interactions for the Elderly: A Review of Literature</article-title>
          <source>Advances in Human-Computer Interaction</source>
          <year>2018</year>
          <volume>2018</volume>
          <fpage>1</fpage>
          <lpage>15</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1155/2018/7249378"/>
          </comment>
          <pub-id pub-id-type="doi">10.1155/2018/7249378</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Gooch</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Mehta</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Price</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>McCormick</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Bandara</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Bennaceur</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Bennasar</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Stuart</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Clare</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Levine</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Cohen</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Nuseibeh</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>How are you feeling? using tangibles to log the emotions of older adults</article-title>
          <year>2020</year>
          <conf-name>Proceedings of the Fourteenth International Conference on Tangible, Embedded, and Embodied Interaction</conf-name>
          <conf-date>February 9-12, 2020</conf-date>
          <conf-loc>Sydney, Australia</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3374920.3374922</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Frijda</surname>
              <given-names>NH</given-names>
            </name>
          </person-group>
          <person-group person-group-type="editor">
            <name name-style="western">
              <surname>Ekman</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Davison</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Varieties of affect: emotions and episodes, moods, and sentiments</article-title>
          <source>The Nature of Emotions: Fundamental Questions</source>
          <year>1994</year>
          <publisher-loc>New York</publisher-loc>
          <publisher-name>Oxford University Press</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Gross</surname>
              <given-names>JJ</given-names>
            </name>
          </person-group>
          <article-title>The Emerging Field of Emotion Regulation: An Integrative Review</article-title>
          <source>Review of General Psychology</source>
          <year>1998</year>
          <month>09</month>
          <day>01</day>
          <volume>2</volume>
          <issue>3</issue>
          <fpage>271</fpage>
          <lpage>299</lpage>
          <pub-id pub-id-type="doi">10.1037/1089-2680.2.3.271</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Riva</surname>
              <given-names>Giuseppe</given-names>
            </name>
            <name name-style="western">
              <surname>Villani</surname>
              <given-names>Daniela</given-names>
            </name>
            <name name-style="western">
              <surname>Cipresso</surname>
              <given-names>Pietro</given-names>
            </name>
            <name name-style="western">
              <surname>Repetto</surname>
              <given-names>Claudia</given-names>
            </name>
            <name name-style="western">
              <surname>Triberti</surname>
              <given-names>Stefano</given-names>
            </name>
            <name name-style="western">
              <surname>Di Lernia</surname>
              <given-names>Daniele</given-names>
            </name>
            <name name-style="western">
              <surname>Chirico</surname>
              <given-names>Alice</given-names>
            </name>
            <name name-style="western">
              <surname>Serino</surname>
              <given-names>Silvia</given-names>
            </name>
            <name name-style="western">
              <surname>Gaggioli</surname>
              <given-names>Andrea</given-names>
            </name>
          </person-group>
          <article-title>Positive and Transformative Technologies for Active Ageing</article-title>
          <source>Stud Health Technol Inform</source>
          <year>2016</year>
          <volume>220</volume>
          <fpage>308</fpage>
          <lpage>15</lpage>
          <pub-id pub-id-type="medline">27046597</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref35">
        <label>35</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kanis</surname>
              <given-names>Marije</given-names>
            </name>
            <name name-style="western">
              <surname>Robben</surname>
              <given-names>Saskia</given-names>
            </name>
            <name name-style="western">
              <surname>Kröse</surname>
              <given-names>Ben</given-names>
            </name>
          </person-group>
          <article-title>How Are You Doing? Enabling Older Adults to Enrich Sensor Data with Subjective Input</article-title>
          <source>Proceedings of the 6th International Workshop on Human Behavior Understanding</source>
          <year>2015</year>
          <volume>9277</volume>
          <fpage>39</fpage>
          <lpage>51</lpage>
          <pub-id pub-id-type="doi">10.1007/978-3-319-24195-1_4</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref36">
        <label>36</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pennebaker</surname>
              <given-names>JW</given-names>
            </name>
          </person-group>
          <article-title>Writing About Emotional Experiences as a Therapeutic Process</article-title>
          <source>Psychol Sci</source>
          <year>1997</year>
          <volume>8</volume>
          <issue>3</issue>
          <fpage>162</fpage>
          <lpage>166</lpage>
          <pub-id pub-id-type="doi">10.1111/j.1467-9280.1997.tb00403.x</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref37">
        <label>37</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Isaacs</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Konrad</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Walendowski</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Lennig</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Hollis</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Whittaker</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Echoes from the past: how technology mediated reflection improves well-being</article-title>
          <year>2013</year>
          <conf-name>Proceedings of the SIGCHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 27-May 2, 2013</conf-date>
          <conf-loc>Paris</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2470654.2466137</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref38">
        <label>38</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hollis</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Konrad</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Whittaker</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Change of heart: emotion tracking to promote behavior change</article-title>
          <year>2015</year>
          <conf-name>Proceedings of the 33rd Annual ACM Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 18-23, 2015</conf-date>
          <conf-loc>Seoul</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2702123.2702196</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref39">
        <label>39</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Yang</surname>
              <given-names>YS</given-names>
            </name>
            <name name-style="western">
              <surname>Ryu</surname>
              <given-names>GW</given-names>
            </name>
            <name name-style="western">
              <surname>Choi</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Methodological Strategies for Ecological Momentary Assessment to Evaluate Mood and Stress in Adult Patients Using Mobile Phones: Systematic Review</article-title>
          <source>JMIR Mhealth Uhealth</source>
          <year>2019</year>
          <month>04</month>
          <day>01</day>
          <volume>7</volume>
          <issue>4</issue>
          <fpage>e11215</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://mhealth.jmir.org/2019/4/e11215/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/11215</pub-id>
          <pub-id pub-id-type="medline">30932866</pub-id>
          <pub-id pub-id-type="pii">v7i4e11215</pub-id>
          <pub-id pub-id-type="pmcid">PMC6462888</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref40">
        <label>40</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bradley</surname>
              <given-names>MM</given-names>
            </name>
            <name name-style="western">
              <surname>Lang</surname>
              <given-names>PJ</given-names>
            </name>
          </person-group>
          <article-title>Measuring emotion: The self-assessment manikin and the semantic differential</article-title>
          <source>Journal of Behavior Therapy and Experimental Psychiatry</source>
          <year>1994</year>
          <month>03</month>
          <volume>25</volume>
          <issue>1</issue>
          <fpage>49</fpage>
          <lpage>59</lpage>
          <pub-id pub-id-type="doi">10.1016/0005-7916(94)90063-9</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref41">
        <label>41</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Russell</surname>
              <given-names>JA</given-names>
            </name>
          </person-group>
          <article-title>A circumplex model of affect</article-title>
          <source>Journal of Personality and Social Psychology</source>
          <year>1980</year>
          <volume>39</volume>
          <issue>6</issue>
          <fpage>1161</fpage>
          <lpage>1178</lpage>
          <pub-id pub-id-type="doi">10.1037/h0077714</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref42">
        <label>42</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Barrett</surname>
              <given-names>LF</given-names>
            </name>
            <name name-style="western">
              <surname>Russell</surname>
              <given-names>JA</given-names>
            </name>
          </person-group>
          <article-title>The Structure of Current Affect</article-title>
          <source>Curr Dir Psychol Sci</source>
          <year>1999</year>
          <volume>8</volume>
          <issue>1</issue>
          <fpage>10</fpage>
          <lpage>14</lpage>
          <pub-id pub-id-type="doi">10.1111/1467-8721.00003</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref43">
        <label>43</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mehrabian</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Russell</surname>
              <given-names>JA</given-names>
            </name>
          </person-group>
          <source>An Approach to Environmental Psychology</source>
          <year>1974</year>
          <publisher-loc>Cambridge</publisher-loc>
          <publisher-name>MIT Press</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref44">
        <label>44</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Read</surname>
              <given-names>JC</given-names>
            </name>
          </person-group>
          <article-title>Validating the Fun Toolkit: an instrument for measuring children’s opinions of technology</article-title>
          <source>Cogn Tech Work</source>
          <year>2007</year>
          <month>05</month>
          <day>22</day>
          <volume>10</volume>
          <issue>2</issue>
          <fpage>119</fpage>
          <lpage>128</lpage>
          <pub-id pub-id-type="doi">10.1007/s10111-007-0069-9</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref45">
        <label>45</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Goodday</surname>
              <given-names>SM</given-names>
            </name>
            <name name-style="western">
              <surname>Atkinson</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Goodwin</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Saunders</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>South</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Mackay</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Denis</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Hinds</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Attenburrow</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Davies</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Welch</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Stevens</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Mansfield</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Suvilehto</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Geddes</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>The True Colours Remote Symptom Monitoring System: A Decade of Evolution</article-title>
          <source>J Med Internet Res</source>
          <year>2020</year>
          <month>01</month>
          <day>15</day>
          <volume>22</volume>
          <issue>1</issue>
          <fpage>e15188</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.jmir.org/2020/1/e15188/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/15188</pub-id>
          <pub-id pub-id-type="medline">31939746</pub-id>
          <pub-id pub-id-type="pii">v22i1e15188</pub-id>
          <pub-id pub-id-type="pmcid">PMC6996723</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref46">
        <label>46</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Tsanas</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Saunders</surname>
              <given-names>KEA</given-names>
            </name>
            <name name-style="western">
              <surname>Bilderbeck</surname>
              <given-names>AC</given-names>
            </name>
            <name name-style="western">
              <surname>Palmius</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Osipov</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Clifford</surname>
              <given-names>GD</given-names>
            </name>
            <name name-style="western">
              <surname>Goodwin</surname>
              <given-names>GM</given-names>
            </name>
            <name name-style="western">
              <surname>De Vos</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Daily longitudinal self-monitoring of mood variability in bipolar disorder and borderline personality disorder</article-title>
          <source>J Affect Disord</source>
          <year>2016</year>
          <month>11</month>
          <day>15</day>
          <volume>205</volume>
          <fpage>225</fpage>
          <lpage>233</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S0165-0327(16)30781-9"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.jad.2016.06.065</pub-id>
          <pub-id pub-id-type="medline">27449555</pub-id>
          <pub-id pub-id-type="pii">S0165-0327(16)30781-9</pub-id>
          <pub-id pub-id-type="pmcid">PMC5296237</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref47">
        <label>47</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Perez Arribas</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Goodwin</surname>
              <given-names>GM</given-names>
            </name>
            <name name-style="western">
              <surname>Geddes</surname>
              <given-names>JR</given-names>
            </name>
            <name name-style="western">
              <surname>Lyons</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Saunders</surname>
              <given-names>KEA</given-names>
            </name>
          </person-group>
          <article-title>A signature-based machine learning model for distinguishing bipolar disorder and borderline personality disorder</article-title>
          <source>Transl Psychiatry</source>
          <year>2018</year>
          <month>12</month>
          <day>13</day>
          <volume>8</volume>
          <issue>1</issue>
          <fpage>274</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1038/s41398-018-0334-0"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41398-018-0334-0</pub-id>
          <pub-id pub-id-type="medline">30546013</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41398-018-0334-0</pub-id>
          <pub-id pub-id-type="pmcid">PMC6293318</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref48">
        <label>48</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Palmius</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Osipov</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Bilderbeck</surname>
              <given-names>AC</given-names>
            </name>
            <name name-style="western">
              <surname>Goodwin</surname>
              <given-names>GM</given-names>
            </name>
            <name name-style="western">
              <surname>Saunders</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Tsanas</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Clifford</surname>
              <given-names>GD</given-names>
            </name>
          </person-group>
          <article-title>A multi-sensor monitoring system for objective mental health management in resource constrained environments</article-title>
          <year>2014</year>
          <conf-name>Appropriate Healthcare Technologies for Low Resource Settings</conf-name>
          <conf-date>September 17-18, 2014</conf-date>
          <conf-loc>London</conf-loc>
          <pub-id pub-id-type="doi">10.1049/cp.2014.0764</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref49">
        <label>49</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhang</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Nicholas</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Knapp</surname>
              <given-names>AA</given-names>
            </name>
            <name name-style="western">
              <surname>Graham</surname>
              <given-names>AK</given-names>
            </name>
            <name name-style="western">
              <surname>Gray</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Kwasny</surname>
              <given-names>MJ</given-names>
            </name>
            <name name-style="western">
              <surname>Reddy</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Mohr</surname>
              <given-names>DC</given-names>
            </name>
          </person-group>
          <article-title>Clinically Meaningful Use of Mental Health Apps and its Effects on Depression: Mixed Methods Study</article-title>
          <source>J Med Internet Res</source>
          <year>2019</year>
          <month>12</month>
          <day>20</day>
          <volume>21</volume>
          <issue>12</issue>
          <fpage>e15644</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.jmir.org/2019/12/e15644/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/15644</pub-id>
          <pub-id pub-id-type="medline">31859682</pub-id>
          <pub-id pub-id-type="pii">v21i12e15644</pub-id>
          <pub-id pub-id-type="pmcid">PMC6942194</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref50">
        <label>50</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Chandrashekar</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Do mental health mobile apps work: evidence and recommendations for designing high-efficacy mental health mobile apps</article-title>
          <source>Mhealth</source>
          <year>2018</year>
          <volume>4</volume>
          <fpage>6</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.21037/mhealth.2018.03.02"/>
          </comment>
          <pub-id pub-id-type="doi">10.21037/mhealth.2018.03.02</pub-id>
          <pub-id pub-id-type="medline">29682510</pub-id>
          <pub-id pub-id-type="pii">mh-04-2018.03.02</pub-id>
          <pub-id pub-id-type="pmcid">PMC5897664</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref51">
        <label>51</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Morris</surname>
              <given-names>ME</given-names>
            </name>
            <name name-style="western">
              <surname>Kathawala</surname>
              <given-names>Q</given-names>
            </name>
            <name name-style="western">
              <surname>Leen</surname>
              <given-names>TK</given-names>
            </name>
            <name name-style="western">
              <surname>Gorenstein</surname>
              <given-names>EE</given-names>
            </name>
            <name name-style="western">
              <surname>Guilak</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Labhard</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Deleeuw</surname>
              <given-names>W</given-names>
            </name>
          </person-group>
          <article-title>Mobile therapy: case study evaluations of a cell phone application for emotional self-awareness</article-title>
          <source>J Med Internet Res</source>
          <year>2010</year>
          <month>04</month>
          <day>30</day>
          <volume>12</volume>
          <issue>2</issue>
          <fpage>e10</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.jmir.org/2010/2/e10/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/jmir.1371</pub-id>
          <pub-id pub-id-type="medline">20439251</pub-id>
          <pub-id pub-id-type="pii">v12i2e10</pub-id>
          <pub-id pub-id-type="pmcid">PMC2885784</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref52">
        <label>52</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Fernández</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Rodríguez</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Rossel</surname>
              <given-names>PO</given-names>
            </name>
            <name name-style="western">
              <surname>Fuentes</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>InMyDay: a digital diary to promote self-care among elders</article-title>
          <year>2017</year>
          <conf-name>11th International Conference on Ubiquitous Computing &#38; Ambient Intelligence</conf-name>
          <conf-date>November 7-10, 2017</conf-date>
          <conf-loc>Philadelphia</conf-loc>
          <pub-id pub-id-type="doi">10.1007/978-3-319-67585-5_49</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref53">
        <label>53</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Fuentes</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Rodríguez</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Herskovic</surname>
              <given-names>V</given-names>
            </name>
          </person-group>
          <article-title>EmoBall: A Study on a Tangible Interface to Self-report Emotional Information Considering Digital Competences</article-title>
          <year>2015</year>
          <conf-name>Ambient Intelligence for Health</conf-name>
          <conf-date>December 1-4, 2015</conf-date>
          <conf-loc>Puerto Varas</conf-loc>
          <fpage>189</fpage>
          <lpage>200</lpage>
          <pub-id pub-id-type="doi">10.1007/978-3-319-26508-7_19</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref54">
        <label>54</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Balaam</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Fitzpatrick</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Good</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Luckin</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Exploring affective technologies for the classroom with the subtle stone</article-title>
          <year>2010</year>
          <conf-name>Proceedings of the SIGCHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 10-15, 2010</conf-date>
          <conf-loc>Atlanta</conf-loc>
          <pub-id pub-id-type="doi">10.1145/1753326.1753568</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref55">
        <label>55</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jingar</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Lindgren</surname>
              <given-names>H</given-names>
            </name>
          </person-group>
          <article-title>Tangible communication of emotions with a digital companion for managing stress: an exploratory co-design study</article-title>
          <year>2019</year>
          <conf-name>Proceedings of the 7th International Conference on Human-Agent Interaction</conf-name>
          <conf-date>October 6-10, 2019</conf-date>
          <conf-loc>Kyoto</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3349537.3351907</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref56">
        <label>56</label>
        <nlm-citation citation-type="web">
          <article-title>State of musculoskeletal health report</article-title>
          <source>Arthritis Research UK</source>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.versusarthritis.org/about-arthritis/data-and-statistics/the-state-of-musculoskeletal-health/">https://www.versusarthritis.org/about-arthritis/data-and-statistics/the-state-of-musculoskeletal-health/</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref57">
        <label>57</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Salivia</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Hourcade</surname>
              <given-names>JP</given-names>
            </name>
          </person-group>
          <article-title>PointAssist: assisting individuals with motor impairments</article-title>
          <year>2013</year>
          <conf-name>Proceedings of the SIGCHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 27-May 2, 2013</conf-date>
          <conf-loc>Paris</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2470654.2466157</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref58">
        <label>58</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Schön</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <source>The Reflective Practitioner: How Professionals Think In Action</source>
          <year>1984</year>
          <publisher-loc>New York</publisher-loc>
          <publisher-name>Basic Books</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref59">
        <label>59</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ayobi</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Sonne</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Marshall</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Cox</surname>
              <given-names>AL</given-names>
            </name>
          </person-group>
          <article-title>Flexible and mindful self-tracking: design implications from paper bullet journals</article-title>
          <year>2018</year>
          <conf-name>Proceedings of the 2018 CHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 21-26, 2018</conf-date>
          <conf-loc>Montreal</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3173574.3173602</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref60">
        <label>60</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Choe</surname>
              <given-names>EK</given-names>
            </name>
            <name name-style="western">
              <surname>Abdullah</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Rabbi</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Thomaz</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Epstein</surname>
              <given-names>DA</given-names>
            </name>
            <name name-style="western">
              <surname>Cordeiro</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Kay</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Abowd</surname>
              <given-names>GD</given-names>
            </name>
            <name name-style="western">
              <surname>Choudhury</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Fogarty</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Matthews</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Kientz</surname>
              <given-names>JA</given-names>
            </name>
          </person-group>
          <article-title>Semi-Automated Tracking: A Balanced Approach for Self-Monitoring Applications</article-title>
          <source>IEEE Pervasive Comput</source>
          <year>2017</year>
          <month>01</month>
          <volume>16</volume>
          <issue>1</issue>
          <fpage>74</fpage>
          <lpage>84</lpage>
          <pub-id pub-id-type="doi">10.1109/mprv.2017.18</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref61">
        <label>61</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Epstein</surname>
              <given-names>DA</given-names>
            </name>
            <name name-style="western">
              <surname>Ping</surname>
              <given-names>AN</given-names>
            </name>
            <name name-style="western">
              <surname>Fogarty</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Munson</surname>
              <given-names>SA</given-names>
            </name>
          </person-group>
          <article-title>A lived informatics model of personal informatics</article-title>
          <year>2015</year>
          <conf-name>Proceedings of the 2015 ACM International Joint Conference on Pervasive and Ubiquitous Computing</conf-name>
          <conf-date>September 7-11, 2015</conf-date>
          <conf-loc>Osaka</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2750858.2804250</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref62">
        <label>62</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Thudt</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Hinrichs</surname>
              <given-names>U</given-names>
            </name>
            <name name-style="western">
              <surname>Huron</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Carpendale</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Self-reflection and personal physicalization construction</article-title>
          <year>2018</year>
          <conf-name>Proceedings of the 2018 CHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 21-26, 2018</conf-date>
          <conf-loc>Montreal</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3173574.3173728</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref63">
        <label>63</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Choe</surname>
              <given-names>EK</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Kay</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Pratt</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Kientz</surname>
              <given-names>JA</given-names>
            </name>
          </person-group>
          <article-title>SleepTight: low-burden, self-monitoring technology for capturing and reflecting on sleep behaviors</article-title>
          <year>2015</year>
          <conf-name>Proceedings of the 2015 ACM International Joint Conference on Pervasive and Ubiquitous Computing</conf-name>
          <conf-date>September 7-11, 2015</conf-date>
          <conf-loc>Osaka</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2750858.2804266</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref64">
        <label>64</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ferrario</surname>
              <given-names>MA</given-names>
            </name>
            <name name-style="western">
              <surname>Simm</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Gradinar</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Forshaw</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Smith</surname>
              <given-names>MT</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Smith</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Whittle</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Computing and mental health: intentionality and reflection at the click of a button</article-title>
          <year>2017</year>
          <conf-name>Proceedings of the 11th EAI International Conference on Pervasive Computing Technologies for Healthcare</conf-name>
          <conf-date>May 23-26, 2017</conf-date>
          <conf-loc>Barcelona</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3154862.3154877</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref65">
        <label>65</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lazar</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Koehler</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Tanenbaum</surname>
              <given-names>TJ</given-names>
            </name>
            <name name-style="western">
              <surname>Nguyen</surname>
              <given-names>DH</given-names>
            </name>
          </person-group>
          <article-title>Why we use and abandon smart devices</article-title>
          <year>2015</year>
          <conf-name>Proceedings of the 2015 ACM International Joint Conference on Pervasive and Ubiquitous Computing</conf-name>
          <conf-date>September 7-11, 2015</conf-date>
          <conf-loc>Osaka</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2750858.2804288</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref66">
        <label>66</label>
        <nlm-citation citation-type="web">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bradley</surname>
              <given-names>MM</given-names>
            </name>
            <name name-style="western">
              <surname>Lang</surname>
              <given-names>PJ</given-names>
            </name>
          </person-group>
          <article-title>Affective norms for English text (ANET): affective ratings of text and instruction manual</article-title>
          <source>The Center for the Study of Emotion and Attention</source>
          <year>2017</year>
          <access-date>2022-04-11</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://csea.phhp.ufl.edu/media/anetmessage.html">https://csea.phhp.ufl.edu/media/anetmessage.html</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref67">
        <label>67</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lombard</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Snyder-Duch</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Bracken</surname>
              <given-names>CC</given-names>
            </name>
          </person-group>
          <article-title>Content Analysis in Mass Communication: Assessment and Reporting of Intercoder Reliability</article-title>
          <source>Human Comm Res</source>
          <year>2002</year>
          <month>10</month>
          <volume>28</volume>
          <issue>4</issue>
          <fpage>587</fpage>
          <lpage>604</lpage>
          <pub-id pub-id-type="doi">10.1111/j.1468-2958.2002.tb00826.x</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref68">
        <label>68</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Braun</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Clarke</surname>
              <given-names>V</given-names>
            </name>
          </person-group>
          <article-title>Using thematic analysis in psychology</article-title>
          <source>Qualitative Research in Psychology</source>
          <year>2006</year>
          <month>01</month>
          <volume>3</volume>
          <issue>2</issue>
          <fpage>77</fpage>
          <lpage>101</lpage>
          <pub-id pub-id-type="doi">10.1191/1478088706qp063oa</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref69">
        <label>69</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Landis</surname>
              <given-names>JR</given-names>
            </name>
            <name name-style="western">
              <surname>Koch</surname>
              <given-names>GG</given-names>
            </name>
          </person-group>
          <article-title>The Measurement of Observer Agreement for Categorical Data</article-title>
          <source>Biometrics</source>
          <year>1977</year>
          <month>03</month>
          <volume>33</volume>
          <issue>1</issue>
          <fpage>159</fpage>
          <pub-id pub-id-type="doi">10.2307/2529310</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref70">
        <label>70</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Elsden</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Selby</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Durrant</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Kirk</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>Fitter, happier, more productive</article-title>
          <source>Interactions</source>
          <year>2016</year>
          <month>08</month>
          <day>23</day>
          <volume>23</volume>
          <issue>5</issue>
          <fpage>45</fpage>
          <lpage>45</lpage>
          <pub-id pub-id-type="doi">10.1145/2975388</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref71">
        <label>71</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Rapp</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Tirassa</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>Know Thyself: A Theory of the Self for Personal Informatics</article-title>
          <source>Human–Computer Interaction</source>
          <year>2017</year>
          <month>04</month>
          <day>10</day>
          <volume>32</volume>
          <issue>5-6</issue>
          <fpage>335</fpage>
          <lpage>380</lpage>
          <pub-id pub-id-type="doi">10.1080/07370024.2017.1285704</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref72">
        <label>72</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Chong</surname>
              <given-names>MK</given-names>
            </name>
            <name name-style="western">
              <surname>Whittle</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Rashid</surname>
              <given-names>U</given-names>
            </name>
            <name name-style="western">
              <surname>Ang</surname>
              <given-names>CS</given-names>
            </name>
          </person-group>
          <article-title>Cue now, reflect later: a study of delayed reflection of diary events</article-title>
          <year>2015</year>
          <conf-name>Human-Computer Interaction-INTERACT</conf-name>
          <conf-date>September 14-18, 2015</conf-date>
          <conf-loc>Bamberg</conf-loc>
          <pub-id pub-id-type="doi">10.1007/978-3-319-22698-9_24</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref73">
        <label>73</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Baumer</surname>
              <given-names>EPS</given-names>
            </name>
            <name name-style="western">
              <surname>Khovanskaya</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Matthews</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Reynolds</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Sosik</surname>
              <given-names>VS</given-names>
            </name>
            <name name-style="western">
              <surname>Gay</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Reviewing reflection: on the use of reflection in interactive system design</article-title>
          <year>2014</year>
          <conf-name>Proceedings of the 2014 conference on Designing interactive systems</conf-name>
          <conf-date>June 21-25, 2014</conf-date>
          <conf-loc>Vancouver</conf-loc>
          <pub-id pub-id-type="doi">10.1145/2598510.2598598</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref74">
        <label>74</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Li</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Dey</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Forlizzi</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>A stage-based model of personal informatics systems</article-title>
          <year>2010</year>
          <conf-name>Proceedings of the SIGCHI Conference on Human Factors in Computing Systems</conf-name>
          <conf-date>April 10-15, 2010</conf-date>
          <conf-loc>Atlanta</conf-loc>
          <pub-id pub-id-type="doi">10.1145/1753326.1753409</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref75">
        <label>75</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Whooley</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Ploderer</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Gray</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>On the integration of self-tracking data amongst quantified self members</article-title>
          <year>2014</year>
          <conf-name>Proceedings of the 28th International BCS Human Computer Interaction Conference</conf-name>
          <conf-date>September 9-12, 2014</conf-date>
          <conf-loc>Southport</conf-loc>
          <pub-id pub-id-type="doi">10.14236/ewic/hci2014.19</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref76">
        <label>76</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hollis</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Konrad</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Springer</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Antoun</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Antoun</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Martin</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Whittaker</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>What Does All This Data Mean for My Future Mood? Actionable Analytics and Targeted Reflection for Emotional Well-Being</article-title>
          <source>Human–Computer Interaction</source>
          <year>2017</year>
          <month>03</month>
          <day>16</day>
          <volume>32</volume>
          <issue>5-6</issue>
          <fpage>208</fpage>
          <lpage>267</lpage>
          <pub-id pub-id-type="doi">10.1080/07370024.2016.1277724</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref77">
        <label>77</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Williams</surname>
              <given-names>K</given-names>
            </name>
          </person-group>
          <article-title>An Anxious Alliance</article-title>
          <source>AAHCC</source>
          <year>2015</year>
          <month>10</month>
          <day>05</day>
          <volume>1</volume>
          <issue>1</issue>
          <fpage>11</fpage>
          <pub-id pub-id-type="doi">10.7146/aahcc.v1i1.21146</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref78">
        <label>78</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bentley</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Tollmar</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Stephenson</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Levy</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Robertson</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Price</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Catrambone</surname>
              <given-names>R</given-names>
            </name>
            <name name-style="western">
              <surname>Wilson</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Health Mashups</article-title>
          <source>ACM Trans. Comput.-Hum. Interact</source>
          <year>2013</year>
          <month>11</month>
          <day>01</day>
          <volume>20</volume>
          <issue>5</issue>
          <fpage>1</fpage>
          <lpage>27</lpage>
          <pub-id pub-id-type="doi">10.1145/2503823</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref79">
        <label>79</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>SL</given-names>
            </name>
            <name name-style="western">
              <surname>Kelly</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Dealing With Information Overload in Multifaceted Personal Informatics Systems</article-title>
          <source>Human–Computer Interaction</source>
          <year>2017</year>
          <month>05</month>
          <day>16</day>
          <volume>33</volume>
          <issue>1</issue>
          <fpage>1</fpage>
          <lpage>48</lpage>
          <pub-id pub-id-type="doi">10.1080/07370024.2017.1302334</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref80">
        <label>80</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lieberman</surname>
              <given-names>MD</given-names>
            </name>
            <name name-style="western">
              <surname>Eisenberger</surname>
              <given-names>NI</given-names>
            </name>
            <name name-style="western">
              <surname>Crockett</surname>
              <given-names>MJ</given-names>
            </name>
            <name name-style="western">
              <surname>Tom</surname>
              <given-names>SM</given-names>
            </name>
            <name name-style="western">
              <surname>Pfeifer</surname>
              <given-names>JH</given-names>
            </name>
            <name name-style="western">
              <surname>Way</surname>
              <given-names>BM</given-names>
            </name>
          </person-group>
          <article-title>Putting feelings into words: affect labeling disrupts amygdala activity in response to affective stimuli</article-title>
          <source>Psychol Sci</source>
          <year>2007</year>
          <month>05</month>
          <volume>18</volume>
          <issue>5</issue>
          <fpage>421</fpage>
          <lpage>8</lpage>
          <pub-id pub-id-type="doi">10.1111/j.1467-9280.2007.01916.x</pub-id>
          <pub-id pub-id-type="medline">17576282</pub-id>
          <pub-id pub-id-type="pii">PSCI1916</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref81">
        <label>81</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Torre</surname>
              <given-names>JB</given-names>
            </name>
            <name name-style="western">
              <surname>Lieberman</surname>
              <given-names>MD</given-names>
            </name>
          </person-group>
          <article-title>Putting Feelings Into Words: Affect Labeling as Implicit Emotion Regulation</article-title>
          <source>Emotion Review</source>
          <year>2018</year>
          <month>03</month>
          <day>20</day>
          <volume>10</volume>
          <issue>2</issue>
          <fpage>116</fpage>
          <lpage>124</lpage>
          <pub-id pub-id-type="doi">10.1177/1754073917742706</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref82">
        <label>82</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Plutchik</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>The Nature of Emotions</article-title>
          <source>Am Sci</source>
          <year>2001</year>
          <volume>89</volume>
          <issue>4</issue>
          <fpage>344</fpage>
          <pub-id pub-id-type="doi">10.1511/2001.4.344</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref83">
        <label>83</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Scherer</surname>
              <given-names>KR</given-names>
            </name>
          </person-group>
          <article-title>What are emotions? And how can they be measured?</article-title>
          <source>Social Science Information</source>
          <year>2005</year>
          <volume>44</volume>
          <issue>4</issue>
          <fpage>695</fpage>
          <lpage>729</lpage>
          <pub-id pub-id-type="doi">10.1177/0539018405058216</pub-id>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
