@comment{
  Cleaned up from a biblio auto-export. Changes:
  - @conference (legacy alias) -> canonical @inproceedings;
  - one field per line with aligned '=' for maintainability;
  - month stored as the standard macro (month = dec), not {12/YYYY};
  - DOI stored bare (no https://doi.org/ resolver prefix);
  - author names normalised to unambiguous "Last, First" form;
  - mojibake "Mùˆller" repaired to M{\"u}ller (classic-BibTeX escape);
  - fused surname "DallaBella" corrected to "Dalla Bella";
  - titles entered in Title Case; acronyms and proper nouns braced so
    sentence-casing styles cannot downcase them;
  - arXiv eprint/archiveprefix added for the arXiv paper.
  Citation keys (2023, 2022, 2021, 9998) are intentionally UNCHANGED so any
  existing \cite commands keep resolving; rename only with a repo-wide sweep.
  Abstracts are quoted published text and are reproduced verbatim.
}

@inproceedings{2023,
  title        = {Know Thyself: Improving Interoceptive Ability through Ambient Biofeedback in the Workplace},
  booktitle    = {{SIGHCI} 2023 Proceedings},
  year         = {2023},
  month        = dec,
  publisher    = {Association for Information Systems},
  organization = {Association for Information Systems},
  address      = {Hyderabad, India},
  abstract     = {Interoception, the perception of the body{\textquoteright}s internal state, is intimately connected to self-regulation and wellbeing. Grounded in the affective science literature, we design an ambient biofeedback system called Soni-Phy and a lab study to investigate whether, when and how an unobtrusive biofeedback system can be used to improve interoceptive sensibility and accuracy by amplifying a users{\textquoteright} internal state. This research has practical significance for the design and improvement of assistive technologies for the workplace.},
  keywords     = {assistive augmentation, biofeedback, Interoception},
  url          = {https://aisel.aisnet.org/sighci2023/3},
  author       = {Chua, Phoebe and Agres, Kat and Nanayakkara, Suranga}
}

@inproceedings{2022,
  title     = {Computational Music Systems for Emotional Health and Wellbeing: A Review},
  booktitle = {{ACM} Special Interest Group on Computer{\textendash}Human Interaction ({SIGHCI}) 2022},
  year      = {2022},
  month     = dec,
  abstract  = {Music is a powerful stimulus, and both active and receptive methods of engaging with music provide affordances for improving physical, mental and social health. The emergence of sophisticated computational methods also underscores the potential for novel music technologies to address a wider range of wellbeing outcomes. In this review, we focus on describing the current state of the literature on computational approaches to music generation for health and wellbeing and identifying possible future directions for research in this area.},
  author    = {Chua, Phoebe and Gupta, Chitralekha and Agres, Kat R. and Nanayakkara, Suranga}
}

@article{2021,
  title    = {Music, Computing, and Health: A Roadmap for the Current and Future Roles of Music Technology for Health Care and Well-Being},
  journal  = {Music \& Science},
  year     = {2021},
  abstract = {The fields of music, health, and technology have seen significant interactions in recent years in developing music technology for health care and well-being. In an effort to strengthen the collaboration between the involved disciplines, the workshop {\textquoteleft}Music, Computing, and Health{\textquoteright} was held to discuss best practices and state-of-the-art at the intersection of these areas with researchers from music psychology and neuroscience, music therapy, music information retrieval, music technology, medical technology (medtech) and robotics. Following the discussions at the workshop, this paper provides an overview of the different methods of the involved disciplines and their potential contributions to developing music technology for health and well-being. Furthermore, the paper summarizes the state of the art in music technology that can be applied in various health scenarios and provides a perspective on challenges and opportunities for developing music technology that 1) supports person-centered care and evidence-based treatments, and 2) contributes to developing standardized, large-scale research on music-based interventions in an interdisciplinary manner. The paper provides a resource for those seeking to engage in interdisciplinary research using music-based computational methods to develop technology for health care, and aims to inspire future research directions by evaluating the state of the art with respect to the challenges facing each field.},
  keywords = {Computing, Health, Music Information Retrieval, Music Psychology, Music Therapy, Roadmap, Technology},
  doi      = {10.1177/2059204321997709},
  author   = {Agres, Kat R. and Schaefer, Rebecca and Volk, Anja and van Hooren, Susan and Holzapfel, Andre and Dalla Bella, Simone and M{\"u}ller, Meinard and de Witte, Martina and Herremans, Dorien and Ramirez-Melendez, Rafael and Neerincx, Mark and Ruiz, Sebastian and Meredith, David and Dimitriadis, Theo and Magee, Wendy}
}

@inproceedings{9998,
  title         = {A Dataset and Classification Model for {Malay}, {Hindi}, {Tamil} and {Chinese} Music},
  booktitle     = {Workshop on Machine Learning and Music ({MML} 2020), at the European Conference on Machine Learning and Principles and Practice of Knowledge Discovery in Databases ({ECML-PKDD}) conference},
  year          = {2020},
  abstract      = {In this paper we present a new dataset, with musical excepts from the three main ethnic groups in Singapore: Chinese, Malay and Indian (both Hindi and Tamil). We use this new dataset to train different classification models to distinguish the origin of the music in terms of these ethnic groups. The classification models were optimized by exploring the use of different musical features as the input. Both high level features, i.e., musically meaningful features, as well as low level features, i.e., spectrogram based features, were extracted from the audio files so as to optimize the performance of the different classification models.},
  eprint        = {2009.04459},
  archiveprefix = {arXiv},
  url           = {https://arxiv.org/abs/2009.04459},
  author        = {Nahar, F. and Agres, K. and Balamurali, B. and Herremans, D.}
}