@article{Foerster2023a, title = {Working with Troubles and Failures in Conversation between Humans and Robots: Workshop Report}, author = {Förster, Frank and Romeo, Marta and Holthaus, Patrick and Wood, Luke Jai and Dondrup, Christian and Fischer, Joel E and Ferdousi Liza, Farhana and Kaszuba, Sara and Hough, Julian and Nesset, Birthe and Hernández García, Daniel and Kontogiorgos, Dimosthenis and Williams, Jennifer and Özkan, Elif Ecem and Barnard, Pepita and Berumen, Gustavo and Price, Dominic and Cobb, Sue and Witschko, Martina and Tisserand, Lucien and Porcheron, Martin and Giuliani, Manuel and Skantze, Gabriel and Healey, Patrick and Papaioannou, Ioannis and Gkatzia, Dimitra and Albert, Saul and Huang, Guanyu and Maraev, Vladislav and Kapetanios, Epaminondas}, journal = {Frontiers in Robotics and AI}, abstract = {This paper summarizes the structure and findings from the first Workshop on Troubles and Failures in Conversations between Humans and Robots. The workshop was organized to bring together a small, interdisciplinary group of researchers working on miscommunication from two complementary perspectives. One group of technology-oriented researchers was made up of roboticists, Human-Robot Interaction (HRI) researchers and dialogue system experts. The second group involved experts from conversation analysis, cognitive science, and linguistics. Uniting both groups of researchers is the belief that communication failures between humans and machines need to be taken seriously and that a systematic analysis of such failures may open fruitful avenues in research beyond current practices to improve such systems, including both speech-centric and multimodal interfaces. This workshop represents a starting point for this endeavour. The aim of the workshop was threefold: firstly, to establish an interdisciplinary network of researchers that share a common interest in investigating communicative failures with a particular view towards robotic speech interfaces; secondly, to gain a partial overview of the "failure landscape" as experienced by roboticists and HRI researchers; and thirdly, to determine the potential for creating a robotic benchmark scenario for testing future speech interfaces with respect to the identified failures. The present article summarizes both the "failure landscape" surveyed during the workshop as well as the outcomes of the attempt to define a benchmark scenario.}, year = {2023}, volume = {10}, doi = {10.3389/frobt.2023.1202306} } @article{Schulz2021, author = {Schulz, Trenton and Soma, Rebekka and Holthaus, Patrick}, title = {{Movement Acts in Breakdown Situations - How a Robot's Recovery Procedure Affects Participants' Opinions}}, journal = {Paladyn, Journal of Behavioral Robotics: Special Issue Trust, Acceptance and Social Cues in Robot Interaction}, year = {2021}, number = {1}, volume = {12}, pages = {336--355}, abstract = {Recovery procedures are targeted at correcting issues encountered by robots. What are people's opinions of a robot during these recovery procedures? During an experiment that examined how a mobile robot moved, the robot would unexpectedly pause or rotate itself to recover from a navigation problem. The serendipity of the recovery procedure and people's understanding of it became a case study to examine how future study designs could consider breakdowns better and look at suggestions for better robot behaviors in such situations. We present the original experiment with the recovery procedure.
We then examine the responses from the participants in this experiment qualitatively to see how they interpreted the breakdown situation when it occurred. Responses could be grouped into themes of sentience, competence, and the robot's forms. The themes indicate that the robot's movement communicated different information to different participants. This leads us to introduce the concept of movement acts to help examine the explicit and implicit parts of communication in movement. Given that we developed the concept looking at an unexpected breakdown, we suggest that researchers should plan for the possibility of breakdowns in experiments and examine and report people's experience around a robot breakdown to further explore unintended robot communication.}, doi = {10.1515/pjbr-2021-0027} } @article{Koay2021, author = {Kheng Lee Koay and Matt Webster and Clare Dixon and Paul Gainer and Dag Syrdal and Michael Fisher and Kerstin Dautenhahn}, doi = {10.1515/pjbr-2021-0028}, title = {Use and usability of software verification methods to detect behaviour interference when teaching an assistive home companion robot: A proof-of-concept study}, journal = {Paladyn, Journal of Behavioral Robotics}, number = {1}, volume = {12}, year = {2021}, pages = {402--422}, abstract = {When studying the use of assistive robots in home environments, and especially how such robots can be personalised to meet the needs of the resident, key concerns are issues related to behaviour verification, behaviour interference and safety. Here, personalisation refers to the teaching of new robot behaviours by both technical and non-technical end users. In this article, we consider the issue of behaviour interference caused by situations where newly taught robot behaviours may affect or be affected by existing behaviours and thus, those behaviours will not or might not ever be executed. We focus in particular on how such situations can be detected and presented to the user. We describe the human–robot behaviour teaching system that we developed as well as the formal behaviour checking methods used. The online use of behaviour checking is demonstrated, based on static analysis of behaviours during the operation of the robot, and evaluated in a user study. We conducted a proof-of-concept human–robot interaction study with an autonomous, multi-purpose robot operating within a smart home environment. Twenty participants individually taught the robot behaviours according to instructions they were given, some of which caused interference with other behaviours. A mechanism for detecting behaviour interference provided feedback to participants and suggestions on how to resolve those conflicts. We assessed the participants’ views on detected interference as reported by the behaviour teaching system. Results indicate that interference warnings given to participants during teaching provoked an understanding of the issue. We did not find a significant influence of participants’ technical background. These results highlight a promising path towards verification and validation of assistive home companion robots that allow end-user personalisation.} } @article{Koay2020, author = {Kheng Lee Koay and Dag Sverre Syrdal and Kerstin Dautenhahn and Michael L.
Walters}, doi = {10.1515/pjbr-2020-0003}, title = {A narrative approach to human-robot interaction prototyping for companion robots}, journal = {Paladyn, Journal of Behavioral Robotics}, number = {1}, volume = {11}, year = {2020}, pages = {66--85} } @article{Chanseau2019, author = {Adeline Chanseau and Kerstin Dautenhahn and Kheng Lee Koay and Michael L. Walters and Gabriella Lakatos and Maha Salem}, doi = {10.1515/pjbr-2019-0030}, title = {How does peoples’ perception of control depend on the criticality of a task performed by a robot}, journal = {Paladyn, Journal of Behavioral Robotics}, number = {1}, volume = {10}, year = {2019}, pages = {380--400} } @article{Rossi2018, author = {Alessandra Rossi and Kerstin Dautenhahn and Kheng Lee Koay and Michael L. Walters}, doi = {10.1515/pjbr-2018-0010}, url = {https://doi.org/10.1515/pjbr-2018-0010}, title = {The impact of peoples’ personal dispositions and personalities on their trust of robots in an emergency scenario}, journal = {Paladyn, Journal of Behavioral Robotics}, number = {1}, volume = {9}, year = {2018}, pages = {137--154}, abstract = {Humans should be able to trust that they can safely interact with their home companion robot. However, robots can exhibit occasional mechanical, programming or functional errors. We hypothesise that the severity of the consequences and the timing of a robot’s different types of erroneous behaviours during an interaction may have different impacts on users’ attitudes towards a domestic robot. First, we investigated human users’ perceptions of the severity of various categories of potential errors that are likely to be exhibited by a domestic robot. Second, we used an interactive storyboard to evaluate participants’ degree of trust in the robot after it performed tasks either correctly, or with ‘small’ or ‘big’ errors. Finally, we analysed the correlation between participants’ responses regarding their personality, predisposition to trust other humans, their perceptions of robots, and their interaction with the robot. We conclude that there is correlation between the magnitude of an error performed by a robot and the corresponding loss of trust by the human towards the robot.
Moreover, we observed that some traits of participants’ personalities (conscientiousness and agreeableness) and their disposition of trusting other humans (benevolence) significantly increased their tendency to trust a robot more during an emergency scenario.} } @article{bedaf2017multi, title={A multi-perspective evaluation of a service robot for seniors: the voice of different stakeholders}, author={Bedaf, Sandra and Marti, Patrizia and Amirabdollahian, Farshid and de Witte, Luc}, journal={Disability and Rehabilitation: Assistive Technology}, pages={1--8}, year={2017}, publisher={Taylor \& Francis} } @article{Saunders2016, author = {Joe Saunders and Dag Syrdal and Kheng Lee Koay and Nathan Burke and Kerstin Dautenhahn}, title = {{`Teach Me - Show Me' - End-user personalisation of a smart home and companion robot}}, year = {2016}, journal = {IEEE Transactions on Human-Machine Systems}, volume = {46}, number = {1}, pages = {27--40}, doi = {10.1109/THMS.2015.2445105} } @article{Webster2015, author = {Matt Webster and Clare Dixon and Michael Fisher and Maha Salem and Joe Saunders and Kheng Lee Koay and Kerstin Dautenhahn and Joan Saez-Pons}, title = {{Towards Reliable Autonomous Robotic Assistants Through Formal Verification}}, year = {2015}, journal = {IEEE Transactions on Human-Machine Systems, Special Issue on Systematic Approaches to Human-Machine Interface}, volume = {46}, number = {2}, pages = {186--196}, doi = {10.1109/THMS.2015.2425139} } @article{Syrdal2015, author = {Dag Sverre Syrdal and Kerstin Dautenhahn and Wan Ching Ho and Kheng Lee Koay}, title = {{Integrating constrained experiments in long-term human-robot interactions using task– and scenario–based prototyping}}, year = {2015}, journal = {The Information Society, Special Issue "Beyond Industrial Robotics: Social Robots Entering Public and Domestic Spheres"}, volume = {31}, number = {3}, pages = {265--283}, doi = {10.1080/01972243.2015.1020212} } @article{Syrdal2014, author = {Dag Sverre Syrdal and Kerstin Dautenhahn and Kheng Lee Koay and Wan Ching Ho}, title = {{Views from within a narrative: Evaluating long-term human-robot interaction in a naturalistic environment using open-ended scenarios}}, year = {2014}, journal = {Cognitive Computation, Special Issue "The quest for modeling emotion, behaviour and context in socially believable Robots and ICT interfaces"}, volume = {6}, number = {4}, pages = {741--759}, doi = {10.1007/s12559-014-9284-x} } @article{Koay2014, author = {Kheng Lee Koay and Dag Sverre Syrdal and Mohammadreza Asghari-Oskoei and Michael L. Walters and K.
Dautenhahn}, title = {{Social Roles and Baseline Proxemic Preferences for a Domestic Service Robot}}, year = {2014}, journal = {International Journal of Social Robotics}, volume = {6}, number = {4}, pages = {469--488}, doi = {10.1007/s12369-014-0232-4} } @article{Amirabdollahian2013, author = {Farshid Amirabdollahian and Rieks op den Akker and Sandra Bedaf and Richard Bormann and Heather Draper and Vanessa Evers and Jorge Gallego Pérez and Gert Jan Gelderblom and Carolina Gutierrez Ruiz and David Hewson and Ninghang Hu and Kheng Lee Koay and Ben Kröse and Hagen Lehmann and Patrizia Marti and Hervé Michel and Hélène Prevot-Huille and Ulrich Reiser and Joe Saunders and Tom Sorell and Jelle Stienstra and Dag Syrdal and Michael Walters and Kerstin Dautenhahn}, title = {{Assistive technology design and development for acceptable robotics companions for ageing years}}, year = {2013}, journal = {Paladyn, Journal of Behavioral Robotics}, volume = {4}, number = {2}, pages = {94--112}, doi = {10.2478/pjbr-2013-0007} } @article{Walters2011, author = {Michael L. Walters and Manja Lohse and Marc Hanheide and Britta Wrede and Kheng Lee Koay and Dag Sverre Syrdal and Anders Green and Helge Hüttenrauch and Kerstin Dautenhahn and Gerhard Sagerer and Kerstin Severinson-Eklundh}, title = {{Evaluating the behaviour of domestic robots using video-based studies}}, year = {2011}, journal = {Advanced Robotics}, volume = {25}, number = {18}, pages = {2233--2254}, doi = {10.1163/016918611X603800} } @article{Walters2008a, author = {Michael L. Walters and Dag S. Syrdal and Kerstin Dautenhahn and René te Boekhorst and Kheng Lee Koay}, title = {{Avoiding the uncanny valley: robot appearance, personality and consistency of behavior in an attention-seeking home scenario for a robot companion}}, year = {2008}, journal = {Autonomous Robots}, volume = {24}, number = {2}, pages = {159--178}, doi = {10.1007/s10514-007-9058-3} } @article{Dautenhahn2007a, author = {Dautenhahn, Kerstin}, title = {{Socially intelligent robots: dimensions of human--robot interaction}}, year = {2007}, journal = {Philosophical Transactions of the Royal Society B: Biological Sciences}, volume = {362}, number = {1480}, pages = {679--704}, doi = {10.1098/rstb.2006.2004} } @article{Dautenhahn2007b, author = {Dautenhahn, Kerstin}, title = {{Methodology and Themes of Human-Robot Interaction: A Growing Research Field}}, year = {2007}, journal = {International Journal of Advanced Robotic Systems}, volume = {4}, number = {1}, pages = {103--108}, doi = {10.5772/5702} } @article{Woods2007, author = {Sarah Woods and Kerstin Dautenhahn and Christina Kaouri and René te Boekhorst and Kheng Lee Koay and Michael L. Walters}, title = {{Are Robots Like People? - Relationships between Participant and Robot Personality Traits in Human-Robot Interaction Studies}}, year = {2007}, journal = {Interaction Studies}, volume = {8}, number = {2}, pages = {281--305}, doi = {10.1075/is.8.2.06woo} } @article{Walters2006, author = {M. L. Walters and K. Dautenhahn and S. N. Woods and K. L. Koay and R. te Boekhorst and D.
Lee}, title = {{Exploratory Studies on Social Spaces between Humans and a Mechanical-looking Robot}}, year = {2006}, journal = {Connection Science: Special Issue "Android Science"}, volume = {18}, number = {4}, pages = {429--442}, doi = {10.1080/09540090600879513} } @incollection{Fujii2023, author = {Fujii, Koyo and Holthaus, Patrick and Samani, Hooman and Premachandra, Chinthaka and Amirabdollahian, Farshid}, title = {{Two-Level Reinforcement Learning Framework for Self-Sustained Personal Robots}}, booktitle = {International Conference on Social Robotics (ICSR 2023)}, editor = {Abdulaziz Al Ali and John-John Cabibihan and Nader Meskin and Silvia Rossi and Wanyue Jiang and Hongsheng He and Shuzhi Sam Ge}, series = {Lecture Notes in Computer Science}, volume = {14453}, address = {Doha, Qatar}, year = {2023}, pages = {363--372}, publisher = {Springer Singapore}, abstract = {As social robots become integral to daily life, effective battery management and personalized user interactions are crucial. We employed Q-learning with the Miro-E robot for balancing self-sustained energy management and personalized user engagement. Based on our approach, we anticipate that the robot will learn when to approach the charging dock and adapt interactions according to individual user preferences. For energy management, the robot underwent iterative training in a simulated environment, where it could opt to either "play" or "go to the charging dock". The robot thereby adapts its interaction style to a specific individual, learning which of three actions would be preferred based on feedback it would receive during real-world human-robot interactions. From an initial analysis, we identified a specific point at which the Q values are inverted, indicating the robot's potential establishment of a battery threshold that triggers its decision to head to the charging dock in the energy management scenario. Moreover, by monitoring the probability of the robot selecting specific behaviours during human-robot interactions over time, we expect to gather evidence that the robot can successfully tailor its interactions to individual users in the realm of personalized engagement.}, isbn = {978-981-99-8715-3}, doi = {10.1007/978-981-99-8715-3_30} } @inproceedings{Ayub2023b, author = {Ayub, Ali and Mehta, Jainish and De Francesco, Zachary and Holthaus, Patrick and Dautenhahn, Kerstin and Nehaniv, Chrystopher}, title = {{How do Human Users Teach a Continual Learning Robot in Repeated Interactions?}}, booktitle = {International Conference on Robot and Human Interactive Communication (RO-MAN 2023)}, abstract = {Continual learning (CL) has emerged as an important avenue of research in recent years, at the intersection of Machine Learning (ML) and Human-Robot Interaction (HRI), to allow robots to continually learn in their environments over long-term interactions with humans. Most research in continual learning, however, has been robot-centered to develop continual learning algorithms that can quickly learn new information on static datasets. In this paper, we take a human-centered approach to continual learning, to understand how humans teach continual learning robots over the long term and if there are variations in their teaching styles. We conducted an in-person study with 40 participants that interacted with a continual learning robot in 200 sessions. In this between-participant study, we used two different CL models deployed on a Fetch mobile manipulator robot.
An extensive qualitative and quantitative analysis of the data collected in the study shows that there is significant variation among the teaching styles of individual users indicating the need for personalized adaptation to their distinct teaching styles. The results also show that although there is a difference in the teaching styles between expert and non-expert users, the style does not have an effect on the performance of the continual learning robot. Finally, our analysis shows that the constrained experimental setups that have been widely used to test most continual learning techniques are not adequate, as real users interact with and teach continual learning robots in a variety of ways.}, address = {Busan, Korea}, year = {2023}, pages = {1975 -- 1982}, doi = {10.1109/RO-MAN57019.2023.10309520}, publisher = {IEEE}, } @incollection{Menon2023, author = {Menon, Catherine and Carta, Silvia and Förster, Frank and Holthaus, Patrick}, title = {{Improving Public Engagement with Ethical Complexities of Assistive Robots}}, booktitle = {Computer-Human Interaction Research and Applications 2021 and 2022, revised selected papers}, series = {Communications in Computer and Information Science}, editor = {Holzinger, Andreas and Plácido da Silva, Hugo and Vanderdonckt, Jean and Constantine, Larry}, year = {2023}, publisher = {Springer Cham}, volume = {1882}, pages = {71--94}, doi = {10.1007/978-3-031-41962-1_4} } @inproceedings{Shahabian2023b, author = {Shahabian Alashti, Mohamad Reza and Bamorovat Abadi, Mohammad Hossein and Holthaus, Patrick and Menon, Catherine and Amirabdollahian, Farshid}, title = {{Lightweight human activity recognition for ambient assisted living}}, booktitle = {The Sixteenth International Conference on Advances in Computer-Human Interactions (ACHI 2023)}, address = {Venice, Italy}, year = {2023}, publisher = {IARIA}, abstract = {Ambient Assisted Living (AAL) systems aim to improve the safety, comfort, and quality of life for the populations with specific attention given to prolonging personal independence during later stages of life. Human Activity Recognition (HAR) plays a crucial role in enabling AAL systems to recognise and understand human actions. Multi-view human activity recognition (MV-HAR) techniques are particularly useful for AAL systems as they can use information from multiple sensors to capture different perspectives of human activities and can help to improve the robustness and accuracy of activity recognition. In this work, we propose a lightweight activity recognition pipeline that utilizes skeleton data from multiple perspectives with the objective of enhancing an assistive robot's perception of human activity. The pipeline includes data sampling, spatial temporal data transformation, and representation and classification methods. This work contrasts a modified classic LeNet classification model (M-LeNet) versus a Vision Transformer (ViT) in detecting and classifying human activities. Both methods are evaluated using a multi-perspective dataset of human activities in the home (RHM-HAR-SK). Our results indicate that combining camera views can improve recognition accuracy. 
Furthermore, our pipeline provides an efficient and scalable solution in the AAL context, where bandwidth and computing resources are often limited.}, isbn = {978-1-68558-078-0}, pages = {188-193}, url = {https://www.thinkmind.org/index.php?view=article&articleid=achi_2023_4_200_20092} } @inproceedings{Shahabian2023a, author = {Shahabian Alashti, Mohamad Reza and Bamorovat Abadi, Mohammad Hossein and Holthaus, Patrick and Menon, Catherine and Amirabdollahian, Farshid}, title = {{RHM-HAR-SK: A Multiview Dataset with Skeleton Data for Ambient Assisted Living Research}}, booktitle = {The Sixteenth International Conference on Advances in Computer-Human Interactions (ACHI 2023)}, address = {Venice, Italy}, year = {2023}, publisher = {IARIA}, abstract = {Human and activity detection has always been a vital task in Human-Robot Interaction (HRI) scenarios, such as those involving assistive robots. In particular, skeleton-based Human Activity Recognition (HAR) offers a robust and effective detection method based on human biomechanics. Recent advancements in human pose estimation have made it possible to extract skeleton positioning data accurately and quickly using affordable cameras. In interaction with a human, robots can therefore capture detailed information from a close distance and flexible perspective. However, recognition accuracy is susceptible to robot movements, where the robot often fails to capture the entire scene. To address this we propose the adoption of external cameras to improve the accuracy of activity recognition on a mobile robot. In support of this proposal, we present the dataset RHM-HAR-SK that combines multiple camera perspectives augmented with human skeleton extraction obtained by the HRNet pose estimation. We apply qualitative and quantitative analysis to the extracted skeleton and its joints to evaluate the coverage of extracted poses per camera perspective and activity. Results indicate that the recognition accuracy for the skeleton varies between camera perspectives and also joints, depending on the type of activity. The RHM-HAR-SK dataset is available at https://robothouse-dev.herts.ac.uk/datasets/RHM/HAR-SK}, isbn = {978-1-68558-078-0}, pages = {181--187}, url = {https://www.thinkmind.org/index.php?view=article&articleid=achi_2023_4_190_20087} } @inproceedings{Bamorovat2023, author = {Bamorovat Abadi, Mohammad Hossein and Shahabian Alashti, Mohamad Reza and Holthaus, Patrick and Menon, Catherine and Amirabdollahian, Farshid}, title = {{RHM: Robot House Multi-view Human Activity Recognition Dataset}}, booktitle = {The Sixteenth International Conference on Advances in Computer-Human Interactions (ACHI 2023)}, address = {Venice, Italy}, year = {2023}, publisher = {IARIA}, abstract = {With the recent increased development of deep neural networks and dataset capabilities, the Human Action Recognition (HAR) domain is growing rapidly in terms of both the available datasets and deep models. Despite this, there are some lacks of datasets specifically covering the Robotics field and Human-Robot interaction. We prepare and introduce a new multi-view dataset to address this. The Robot House Multi-View (RHM) dataset contains four views: Front, Back, Ceiling (Omni), and robot-views. There are 14 classes with 6701 video clips for each view, making a total of 26804 video clips for the four views. The lengths of the video clips are between 1 to 5 seconds. The videos with the same number and the same classes are synchronized in different views. 
In the second part of this paper, we consider how single streams afford activity recognition using established state-of-the-art models. We then assess the affordance for each view based on information theoretic modelling and mutual information concept. Furthermore, we benchmark the performance of different views, thus establishing the strengths and weaknesses of each view relevant to their information content and performance of the benchmark. Our results lead us to conclude that multi-view and multi-stream activity recognition has the added potential to improve activity recognition results. The RHM dataset is available at https://robothouse-dev.herts.ac.uk/datasets/RHM/HAR-1/}, isbn = {978-1-68558-078-0}, pages = {159--166}, url = {https://www.thinkmind.org/index.php?view=article&articleid=achi_2023_4_160_20077} } @inproceedings{Bagchi2023, author = {Bagchi, Shelly and Holthaus, Patrick and Beraldo, Gloria and Senf, Emmanuel and Hernández García, Daniel and Han, Zhao and Jayaraman, Suresh Kumaar and Rossi, Alessandra and Esterwood, Connor and Andriella, Antonio and Pridham, Paul}, title = {{Towards Improved Replicability of Human Studies in Human-Robot Interaction: Recommendations for Formalized Reporting}}, booktitle = {International Conference on Human-Robot Interaction (HRI 2023)}, address = {Stockholm, Sweden}, year = {2023}, pages = {629--633}, publisher = {ACM/IEEE}, abstract = {In this paper, we present a proposed format for reporting human studies in Human-Robot Interaction (HRI). We specifically call out details which are often overlooked or left out of conference and journal papers due to space constraints, and propose a standardized format to contain those details in paper appendices. We expect that providing a formalized study reporting method will promote an increase in replicability and reproducibility of HRI studies, and encourage meta-analysis and review, ultimately increasing the generalizability and validity of HRI research. We consider our draft the first step towards these goals, and we release it to solicit feedback from the HRI community on the included topics.}, doi = {10.1145/3568294.3580162} } @inproceedings{Holthaus2023, author = {Holthaus, Patrick and Schulz, Trenton and Lakatos, Gabriella and Soma, Rebekka}, title = {{Communicative Robot Signals: Presenting a New Typology for Human-Robot Interaction}}, booktitle = {International Conference on Human-Robot Interaction (HRI 2023)}, address = {Stockholm, Sweden}, year = {2023}, publisher = {ACM/IEEE}, abstract = {We present a new typology for classifying signals from robots when they communicate with humans. For inspiration, we use ethology, the study of animal behaviour and previous efforts from literature as guides in defining the typology. The typology is based on communicative signals that consist of five properties: the origin where the signal comes from, the deliberateness of the signal, the signal's reference, the genuineness of the signal, and its clarity (i.e. how implicit or explicit it is). 
Using the accompanying worksheet, the typology is straightforward to use to examine communicative signals from previous human-robot interactions and provides guidance for designers to use the typology when designing new robot behaviours.}, tags = {conference; selected; open; doi-open; best paper candidate}, doi = {10.1145/3568162.3578631}, pages = {132--141}, supplements = {Holthaus2023-worksheet.pdf; https://www.youtube.com/embed/wdHmkMewUHI} } @incollection{Riches2022, author = {Riches, Lewis and Koay, Kheng Lee and Holthaus, Patrick}, title = {{Classification of personal data used by personalised robot companions based on concern of exposure}}, booktitle = {International Conference on Social Robotics (ICSR 2022)}, editor = {Cavallo, Filippo and Cabibihan, John-John and Fiorini, Laura and Sorrentino, Alessandra and He, Hongsheng and Liu, Xiaorui and Matsumoto, Yoshio and Ge, Shuzhi Sam}, series = {Lecture Notes in Computer Science}, volume = {13817}, address = {Florence, Italy}, year = {2022}, publisher = {Springer Cham}, abstract = {We present a paper looking at the accidental exposure of personal data by personalised companion robots in human-robot interaction. Due to the need for personal data, personalisation brings inherent risk of accidental personal data exposure through multi-modal communication. An online questionnaire was conducted to collect perceptions on the level of concern of personal data being exposed. The personal data examined in this paper has been used to personalise a companion robot along with links to the UK general data protection act. The level of concern for these personal data has been classified into high, medium and low concern with guidelines provided on how these different classifications should be handled by a robot. Evidence has also been found that age, gender, extroversion and conscientiousness influence a person's perceptions on personal data exposure concern.}, doi = {10.1007/978-3-031-24667-8_21}, isbn = {978-3-031-24667-8}, pages = {228--237} } @inproceedings{BamorovatAbadi2021a, author = "Bamorovat Abadi, Mohammad Hossein and Shahabian Alashti, Mohamad Reza and Holthaus, Patrick and Menon, Catherine and Amirabdollahian, Farshid", title = "Affordable Robot Mapping using Omnidirectional Vision", booktitle = "Proceedings of the 4th UK-RAS Conference: Robotics at Home (UKRAS21)", address = "Hatfield, UK", year = "2021", abstract = "Mapping is a fundamental requirement for robot navigation. In this paper, we introduce a novel visual mapping method that relies solely on a single omnidirectional camera. We present a metric that allows us to generate a map from the input image by using a visual Sonar approach. The combination of the visual sonars with the robot's odometry enables us to determine a relation equation and subsequently generate a map that is suitable for robot navigation. Results based on visual map comparison indicate that our approach is comparable with the established solutions based on RGB-D cameras or laser-based sensors. 
We now embark on evaluating our accuracy against the established methods.", pages = "29--30", doi = "10.31256/If7Nm5Z" } @inproceedings{BamorovatAbadi2021b, author = "Bamorovat Abadi, Mohammad Hossein and Shahabian Alashti, Mohamad Reza and Holthaus, Patrick and Menon, Catherine and Amirabdollahian, Farshid", title = "Robot House Human Activity Recognition Dataset", booktitle = "Proceedings of the 4th UK-RAS Conference: Robotics at Home (UKRAS21)", address = "Hatfield, UK", year = "2021", abstract = "Human activity recognition is one of the most challenging tasks in computer vision. State-of-the art approaches such as deep learning techniques thereby often rely on large labelled datasets of human activities. However, currently available datasets are suboptimal for learning human activities in companion robotics scenarios at home, for example, missing crucial perspectives. With this as a consideration, we present the University of Hertfordshire Robot House Human Activity Recognition Dataset (RH-HAR-1). It contains RGB videos of a human engaging in daily activities, taken from four different cameras. Importantly, this dataset contains two non-standard perspectives: a ceiling-mounted fisheye camera and a mobile robot's view. In the first instance, RH-HAR-1 covers five daily activities with a total of more than 10,000 videos.", pages = "19--20", doi = "10.31256/Bw7Kt2N" } @inproceedings{ShahabianAlashti2021, author = "Shahabian Alashti, Mohamad Reza and Bamorovat Abadi, Mohammad Hossein and Holthaus, Patrick and Menon, Catherine and Amirabdollahian, Farshid", title = "Human activity recognition in RoboCup@home: Inspiration from online benchmarks", booktitle = "Proceedings of the 4th UK-RAS Conference: Robotics at Home (UKRAS21)", address = "Hatfield, UK", year = "2021", abstract = "Human activity recognition is an important aspect of many robotics applications. In this paper, we discuss how well the RoboCup@home competition accounts for the importance of such recognition algorithms. Using public benchmarks as an inspiration, we propose to add a new task that specifically tests the performance of human activity recognition in this league. We suggest that human-robot interaction research in general can benefit from the addition of such a task as RoboCup@home is considered to accelerate, regulate, and consolidate the field.", pages = "27--28", doi = "10.31256/Os6Aw4Y" } @inproceedings{Rossi2020roman, title={How Social Robots Influence People's Trust in Critical Situations}, author={Rossi, Alessandra and Dautenhahn, Kerstin and Koay, Kheng Lee and Walters, Michael L}, booktitle={International Conference on Robot and Human Interactive Communication (RO-MAN)}, pages={1020--1025}, year={2020}, publisher={IEEE}, doi={10.1109/RO-MAN47096.2020.9223471} } @incollection{Rossi2020, author = {Rossi, Alessandra and Dautenhahn, Kerstin and Koay, Kheng Lee and Walters, Michael L. and Holthaus, Patrick}, editor = {Wagner, Alan R. and Feil-Seifer, David and Haring, Kerstin S. 
and Rossi, Silvia and Williams, Thomas and He, Hongsheng and Ge, Shuzhi Sam}, title = {{Evaluating people's perceptions of trust in a robot in a repeated interactions study}}, booktitle = {International Conference on Social Robotics (ICSR 2020)}, address = {Golden, Colorado}, series = {Lecture Notes in Computer Science}, year = {2020}, volume = {12483}, publisher = {Springer Cham}, doi = {10.1007/978-3-030-62056-1_38}, isbn = {978-3-030-62056-1}, abstract = {Trust has been established to be a key factor in fostering human-robot interactions. However, trust can change over time according to different factors, including a breach of trust due to a robot's error. In this study, we observed people's interactions with a companion robot in a real house adapted for human-robot interaction experimentation over three weeks. The interactions happened in six day-scenarios in which a robot performed different tasks under two different conditions. Each condition included fourteen tasks performed by the robot, either correctly, or with errors with severe consequences on the first or last day of interaction. At the end of each experimental condition, participants were presented with an emergency scenario to evaluate their trust in the robot. We evaluated participants' trust in the robot by observing their decision to trust the robot during the emergency scenario, and by collecting their consideration through questionnaires. We concluded that there is a correlation between the timing of an error with severe consequences performed by the robot and the corresponding loss of trust of the human in the robot. In particular, people's trust is subjected to the initial mental formation.} } @incollection{Holthaus2019, author = {Holthaus, Patrick and Menon, Catherine and Amirabdollahian, Farshid}, title = {{How a Robot's Social Credibility Affects Safety Performance}}, booktitle = {International Conference on Social Robotics (ICSR 2019)}, editor = {Salichs, Miguel A. and Ge, Shuzhi Sam and Barakova, Emilia Ivanova and Cabibihan, John-John and Wagner, Alan R. and Castro-Gonz{\'a}lez, {\'A}lvaro and He, Hongsheng}, address = {Madrid, Spain}, series = {Lecture Notes in Computer Science}, year = {2019}, publisher = {Springer Cham}, pages = {740--749}, abstract = {This paper connects the two domains of HRI (Human-Robot Interaction) and safety engineering to ensure that the design of interactive robots considers an effect of social behaviours on safety functionality. We conducted a preliminary user study with a social robot that alerts participants during a puzzle-solving task to an environmental hazard. Our study findings show an indicative trend that users who were interrupted by a socially credible robot are more likely to act and mitigate the hazard than users interrupted by a robot lacking social credibility.}, doi = {10.1007/978-3-030-35888-4_69}, isbn = {978-3-030-35888-4}, volume = {11876} } @inproceedings{Schulz2019a, author = {Schulz, Trenton and Holthaus, Patrick and Amirabdollahian, Farshid and Koay, Kheng Lee and Torresen, Jim and Herstad, Jo}, title = {{Differences of Human Perceptions of a Robot Moving using Linear or Slow in, Slow out Velocity Profiles When Performing a Cleaning Task}}, booktitle = {International Conference on Robot and Human Interactive Communication (RO-MAN 2019)}, address = {New Delhi, India}, year = {2019}, publisher = {IEEE}, abstract = {We investigated how a robot moving with different velocity profiles affects a person’s perception of it when working together on a task.
The two profiles are the standard linear profile and a profile based on the animation principles of slow in, slow out. The investigation was accomplished by running an experiment in a home context where people and the robot cooperated on a clean-up task. We used the Godspeed series of questionnaires to gather people’s perception of the robot. Average scores for each series appear not to be different enough to reject the null hypotheses, but looking at the component items provides paths to future areas of research. We also discuss the scenario for the experiment and how it may be used for future research into using animation techniques for moving robots and improving the legibility of a robot’s locomotion.}, doi = {10.1109/RO-MAN46459.2019.8956355}, issn = {1944-9445} } @inproceedings{Schulz2019b, author = {Schulz, Trenton and Holthaus, Patrick and Amirabdollahian, Farshid and Koay, Kheng Lee}, title = {{Humans' Perception of a Robot Moving Using a Slow in and Slow Out Velocity Profile}}, booktitle = {International Conference on Human-Robot Interaction (HRI 2019)}, address = {Daegu, South Korea}, year = {2019}, pages = {594--595}, publisher = {ACM/IEEE}, abstract = {Humans need to understand and trust the robots they are working with. We hypothesize that how a robot moves can impact people’s perception and their trust. We present a methodology for a study to explore people’s perception of a robot using the animation principle of slow in, slow out—to change the robot’s velocity profile versus a robot moving using a linear velocity profile. Study participants will interact with the robot within a home context to complete a task while the robot moves around the house. The participants’ perceptions of the robot will be recorded using the Godspeed Questionnaire. A pilot study shows that it is possible to notice the difference between the linear and the slow in, slow out velocity profiles, so the full experiment planned with participants will allow us to compare their perceptions based on the two observable behaviors.}, doi = {10.1109/HRI.2019.8673239} } @inproceedings{Menon2019a, author = {Menon, Catherine and Holthaus, Patrick}, title = {{Does a Loss of Social Credibility Impact Robot Safety? Balancing social and safety behaviours of assistive robots}}, booktitle = {International Conference on Performance, Safety and Robustness in Complex Systems and Applications (PESARO 2019)}, address = {Valencia, Spain}, year = {2019}, pages = {18--24}, publisher = {IARIA}, abstract = {This position paper discusses the safety-related functions performed by assistive robots and explores the relationship between trust and effective safety risk mitigation. We identify a measure of the robot’s social effectiveness, termed social credibility, and present a discussion of how social credibility may be gained and lost. This paper’s contribution is the identification of a link between social credibility and safety-related performance. Accordingly, we draw on analyses of existing systems to demonstrate how an assistive robot’s safety-critical functionality can be impaired by a loss of social credibility. In addition, we present a discussion of some of the consequences of prioritising either safety-related functionality or social engagement.
We propose the identification of a mixed-criticality scheduling algorithm in order to maximise both safety-related performance and social engagement.}, isbn = {978-1-61208-698-9}, url = {https://www.thinkmind.org/index.php?view=article&articleid=pesaro_2019_2_10_60021} } @InProceedings{Rossi2019, author="Rossi, Alessandra and Garcia, Fernando and Maya, Arturo Cruz and Dautenhahn, Kerstin and Koay, Kheng Lee and Walters, Michael L. and Pandey, Amit K.", editor="Althoefer, Kaspar and Konstantinova, Jelizaveta and Zhang, Ketao", title="Investigating the Effects of Social Interactive Behaviours of a Robot on People's Trust During a Navigation Task", booktitle="Towards Autonomous Robotic Systems", year="2019", publisher="Springer International Publishing", address="Cham", pages="349--361", abstract="Identifying the roles and the specific social behaviours that evoke human trust towards robots is key for user acceptance. Specially, while performing tasks in the real world, such as navigation or guidance, the predictability of robot motion and predictions of user intentions facilitate interaction. We present a user study in which a humanoid-robot guided participants around a human populated environment, avoiding collisions while following a socially acceptable trajectory. We investigated which behaviours performed by a humanoid robot during a guidance task exhibited better social acceptance by people, and how robot behaviours influence their trust in a robot to safely complete a guiding task. We concluded that in general, people prefer and trust a robot that exhibits social behaviours such as talking and maintaining an appropriate safe distance from obstacles.", isbn="978-3-030-23807-0" } @inproceedings{Rossi2018a, author = {Rossi, Alessandra and Holthaus, Patrick and Dautenhahn, Kerstin and Koay, Kheng Lee and Walters, Michael L.}, title = {{Getting to know Pepper: Effects of people's awareness of a robot's capabilities on their trust in the robot}}, booktitle = {International Conference on Human-Agent Interaction (HAI 2018)}, address = {Southampton, UK}, year = {2018}, publisher = {ACM}, doi = {10.1145/3284432.3284464}, abstract = {This work investigates how human awareness about a social robot's capabilities is related to trusting this robot to handle different tasks. We present a user study that relates knowledge on different quality levels to participant's ratings of trust. Secondary school pupils were asked to rate their trust in the robot after three types of exposures: a video demonstration, a live interaction, and a programming task. 
The study revealed that the pupils' trust is positively affected across different domains after each session, indicating that human users trust a robot more the more awareness about the robot they have.} } @inproceedings{Chanseau2018a, author = {Chanseau, Adeline and Dautenhahn, Kerstin and Walters, Michael Leonard and Koay, Kheng Lee and Lakatos, Gabriella and Salem, Maha}, title = {{Does the Appearance of a Robot Influence People's Perception of Task Criticality?}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Nanjing, China}, year = {2018}, publisher = {IEEE} } @inproceedings{Chanseau2017a, author = {Chanseau, Adeline and Dautenhahn, Kerstin and Walters, Michael Leonard and Lakatos, Gabriella and Koay, Kheng Lee and Salem, Maha}, title = {{People’s Perceptions of Task Criticality and Preferences for Robot Autonomy}}, booktitle = {UK Conference on Robots and Autonomous Systems (UK-RAS)}, address = {London, UK}, year = {2017} } @inproceedings{Rossi2017a, author = {Alessandra Rossi and Kerstin Dautenhahn and Kheng Lee Koay and Michael L. Walters}, title = {{Human Perceptions of the Severity of Domestic Robot Errors}}, booktitle = {International Conference on Social Robotics (ICSR)}, address = {Tsukuba, Japan}, year = {2017}, publisher = {Springer} } @inproceedings{Rossi2017b, author = {Alessandra Rossi and Kerstin Dautenhahn and Kheng Lee Koay and Michael L. Walters}, title = {{A Study on How the Timing and Magnitude of Robot Errors May Influence People Trust of Robots in an Emergency Scenario}}, booktitle = {International Conference on Social Robotics (ICSR)}, address = {Tsukuba, Japan}, year = {2017}, publisher = {Springer} } @inproceedings{Koay2017, author = {Kheng Lee Koay and Dag Syrdal and Richard Bormann and Joe Saunders and Michael L. Walters and Kerstin Dautenhahn}, title = {{Initial Design, Implementation and Technical Evaluation of a Context-Aware Proxemics Planner for a Social Robot}}, booktitle = {International Conference on Social Robotics (ICSR)}, address = {Tsukuba, Japan}, year = {2017}, publisher = {Springer} } @inproceedings{Chanseau2016, author = {Adeline Chanseau and Kerstin Dautenhahn and Kheng Lee Koay and Maha Salem}, title = {{Who is in charge? Sense of control and robot anxiety in Human-Robot Interaction}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Teachers College, Columbia University, USA}, year = {2016}, pages = {743--748}, publisher = {IEEE} } @inproceedings{Koay2016, author = {Kheng Lee Koay and Dag Sverre Syrdal and Wan Ching Ho and Kerstin Dautenhahn}, title = {{Prototyping Realistic Long-Term Human-Robot Interaction for the Study of Agent Migration}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Teachers College, Columbia University, USA}, year = {2016}, pages = {809--816}, publisher = {IEEE} } @inproceedings{Heffernan2016, author = {Rory O. Heffernan and Michael L. Walters and Neil R. 
Davey and Rene te Boekhorst and Kheng Lee Koay and Kerstin Dautenhahn}, title = {{Adaptive Smart Environments: Detecting human behaviour from multimodal observation}}, booktitle = {International Conference on Advances in Computer-Human Interactions (ACHI)}, address = {Venice, Italy}, year = {2016}, pages = {353--358} } @inproceedings{Salem2015, author = {Maha Salem and Gabriella Lakatos and Farshid Amirabdollahian and Kerstin Dautenhahn}, title = {{Would You Trust a (Faulty) Robot?: Effects of Error, Task Type and Personality on Human-Robot Cooperation and Trust}}, booktitle = {International Conference on Human-Robot Interaction (HRI)}, year = {2015}, publisher = {ACM/IEEE}, isbn = {978-1-4503-2883-8}, address = {Portland, Oregon, USA}, pages = {141--148}, doi = {10.1145/2696454.2696497} } @InProceedings{Draper2014, author={Draper, Heather and Sorell, Tom and Bedaf, Sandra and Syrdal, Dag Sverre and Gutierrez-Ruiz, Carolina and Duclos, Alexandre and Amirabdollahian, Farshid}, title={{Ethical Dimensions of Human-Robot Interactions in the Care of Older People: Insights from 21 Focus Groups Convened in the UK, France and the Netherlands}}, booktitle={International Conference on Social Robotics (ICSR)}, year={2014}, publisher={Springer}, address={Sydney, Australia}, pages={135--145} } @inproceedings{Koay2013a, author = {Kheng Lee Koay and Michael L. Walters and Alex May and Anna Dumitriu and Bruce Christianson and Nathan Burke and Kerstin Dautenhahn}, title = {{Exploring Robot Etiquette: Refining a HRI Home Companion Scenario Based on Feedback from Two Artists Who Lived with Robots in the UH Robot House}}, booktitle = {International Conference on Social Robotics (ICSR)}, address = {Bristol, UK}, year = {2013}, publisher = {Springer} } @inproceedings{Lehmann2013, author = {Hagen Lehmann and Mick L. Walters and Anna Dumitriu and Alex May and Kheng Lee Koay and Joan Saez and Dag Sverre Syrdal and Luke Wood and Joe Saunders and Nathan Burke and Ismael Duque and Bruce Christianson and Kerstin Dautenhahn}, title = {{Artists as HRI Pioneers: A Creative Approach to Developing Novel Interactions for Living with Robots}}, booktitle = {International Conference on Social Robotics (ICSR)}, address = {Bristol, UK}, year = {2013}, publisher = {Springer} } @inproceedings{Amirabdollahian2013a, author = {Farshid Amirabdollahian and Kerstin Dautenhahn and Clare Dixon and Kerstin Eder and Michael Fisher and Kheng L.
Koay and Evgeni Magid and Anthony Pipe and Maha Salem and Joe Saunders and Matt Webster}, title = {{Can You Trust Your Robotic Assistant?}}, booktitle = {International Conference on Social Robotics (ICSR)}, address = {Bristol, UK}, year = {2013}, publisher = {Springer} } @inproceedings{Saunders2013, author = {Joe Saunders and Nathan Burke and Kheng Lee Koay and Kerstin Dautenhahn}, title = {{A User Friendly Robot Architecture for Re-ablement and Co-learning in A Sensorised Home}}, booktitle = {European AAATE (Association for the Advancement of Assistive Technology in Europe) Conference}, address = {Vilamoura, Portugal}, year = {2013} } @inproceedings{Amirabdollahian2013b, author = {Farshid Amirabdollahian and Rieks Op Den Akker and Sandra Bedaf and Richard Bormann and Heather Draper and Gert Jan Gelderblom and Carolina Gutierrez Ruiz and David Hewson and Kheng Lee Koay and Ben Kröse and Patrizia Marti and Hélène Prevot-Huille and Ulrich Reiser and Joe Saunders and Tom Sorell and Kerstin Dautenhahn}, title = {{Accompany: Acceptable robotiCs COMPanions for AgeiNg Years - Multidimensional Aspects of Human-System Interactions}}, booktitle = {International Conference on Human System Interaction}, address = {Sopot, Poland}, year = {2013}, publisher = {IEEE} } @inproceedings{Duque2013a, author = {Ismael Duque and Kerstin Dautenhahn and Kheng Lee Koay and Ian Willcock and Bruce Christianson}, title = {{A different approach of using personas in human-robot interaction: Integrating personas as computational models to modify robot companions' behaviour}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Gyeongju, South Korea}, year = {2013}, publisher = {IEEE} } @inproceedings{Duque2013b, author = {Ismael Duque and Kerstin Dautenhahn and Kheng Lee Koay and Ian Willcock and Bruce Christianson}, title = {{Knowledge-driven User Activity Recognition for a Smart House - Development and Validation of a Generic and Low-Cost, Resource-Efficient System}}, booktitle = {International Conference on Advances in Computer-Human Interactions (ACHI)}, address = {Nice, France}, year = {2013}, pages = {141--146} } @inproceedings{Walters2013a, author = {Michael Leonard Walters and Kheng Lee Koay and Dag Sverre Syrdal and Anne Campbell and Kerstin Dautenhahn}, title = {{Companion robots for elderly people: using theatre to investigate potential users' views}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Gyeongju, South Korea}, year = {2013}, publisher = {IEEE} } @inproceedings{Walters2013b, author = {Michael L. Walters and Samuel Marcos and Dag S. Syrdal and Kerstin Dautenhahn}, title = {{An Interactive Game with a Robot: Peoples' Perceptions of Robot Faces and a Gesture-Based User Interface}}, booktitle = {International Conference on Advances in Computer-Human Interactions (ACHI)}, address = {Nice, France}, year = {2013}, pages = {123--128} } @inproceedings{Koay2013b, author = {Kheng Lee Koay and Gabriella Lakatos and Dag Sverre Syrdal and Marta Gacsi and Boroka Bereczky and Kerstin Dautenhahn and Adam Miklosi and Michael Walters}, title = {{Hey! There is someone at your door.
A Hearing Robot using Visual Communication Signals of Hearing Dogs to Communicate Intent}}, booktitle = {Symposium on Artificial Life}, address = {Singapore}, year = {2013}, publisher = {IEEE} } @inproceedings{Syrdal2011, author = {Dag Sverre Syrdal and Kerstin Dautenhahn and Michael Leonard Walters and Kheng Lee Koay and Nuno Otero}, title = {{The Theatre methodology for facilitating discussion in human-robot interaction on information disclosure in a home environment}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Atlanta, Georgia, USA}, year = {2011}, pages = {479--484}, publisher = {IEEE} } @inproceedings{Deshmukh2011, author = {Amol A. Deshmukh and Mei Yii Lim and Michael Kriegel and Ruth Aylett and Kyron Du Casse and Kheng Lee Koay and Kerstin Dautenhahn}, title = {{Managing Social Constraints on Recharge Behaviour for Robot Companions Using Memory}}, booktitle = {International Conference on Human-Robot Interaction (HRI)}, address = {Lausanne, Switzerland}, year = {2011}, publisher = {ACM/IEEE} } @inproceedings{Syrdal2010a, author = {Dag Sverre Syrdal and Kheng Lee Koay and Marta Gacsi and Michael Leonard Walters and Kerstin Dautenhahn}, title = {{Video Prototyping of Dog-Inspired Non-verbal Affective Communication for an Appearance Constrained Robot}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Viareggio, Italy}, year = {2010}, pages = {632--637}, publisher = {IEEE} } @inproceedings{Syrdal2010b, author = {Dag Sverre Syrdal and Kerstin Dautenhahn and Kheng Lee Koay and Michael Leonard Walters and Nuno Otero}, title = {{Exploring Human Mental Models of Robots through Explicitation Interviews}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Viareggio, Italy}, year = {2010}, pages = {638--645}, publisher = {IEEE} } @inproceedings{Casse2009, author = {Kyron Du Casse and Kheng Lee Koay and Wan Ching Ho and Kerstin Dautenhahn}, title = {{Reducing the cost of robotics software: SAMGAR, a generic modular robotic software communication architecture}}, booktitle = {International Conference on Advanced Robotics (ICAR)}, address = {Munich, Germany}, year = {2009} } @inproceedings{Koay2009, author = {Kheng Lee Koay and Dag Sverre Syrdal and Michael L. Walters and Kerstin Dautenhahn}, title = {{A User Study on Visualization of Agent Migration between Two Companion Robots}}, booktitle = {International Conference on Human-Computer Interaction (HCI)}, address = {San Diego, CA, USA}, year = {2009} } @inproceedings{Walters2009a, author = {M. L. Walters and K. Dautenhahn and R. te Boekhorst and K. L. Koay}, title = {{An Empirical Framework for Human Robot Proximity}}, booktitle = {New Frontiers in Human-Robot Interaction, a symposium at the AISB2009 Convention}, address = {Edinburgh, Scotland}, year = {2009}, pages = {144--149}, publisher = {SSAISB} } @inproceedings{Walters2009b, author = {M. L. Walters and D. S. Syrdal and K. L. Koay and K. Dautenhahn and R. te Boekhorst}, title = {{Human Preferences and Perceptions of Robot Appearances}}, booktitle = {New Frontiers in Human-Robot Interaction, a symposium at the AISB2009 Convention}, address = {Edinburgh, Scotland}, year = {2009}, pages = {136--143}, publisher = {SSAISB} } @inproceedings{Syrdal2009a, author = {Dag Sverre Syrdal and Kerstin Dautenhahn and Kheng Lee Koay and Michael L.
Walters}, title = {{The Negative Attitudes towards Robots Scale and Reactions to Robot Behaviour in a Live Human-Robot Interaction Study}}, booktitle = {New Frontiers in Human-Robot Interaction, a symposium at the AISB2009 Convention}, address = {Edinburgh, Scotland}, year = {2009}, pages = {109--115}, publisher = {SSAISB} } @inproceedings{Syrdal2009b, author = {Dag Sverre Syrdal and Kheng Lee Koay and Michael L. Walters and Kerstin Dautenhahn}, title = {{"The boy-robot should bark!" – Children’s Impressions of Agent Migration into Diverse Embodiments}}, booktitle = {New Frontiers in Human-Robot Interaction, a symposium at the AISB2009 Convention}, address = {Edinburgh, Scotland}, year = {2009}, pages = {116--121}, publisher = {SSAISB} } @inproceedings{Walters2008b, author = {M. L. Walters and D. S. Syrdal and K. L. Koay and K. Dautenhahn and R. te Boekhorst}, title = {{Human Approach Distances to a Mechanical-Looking Robot with Different Robot Voice Styles}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Munich, Germany}, year = {2008}, publisher = {IEEE} } @inproceedings{Lohse2008, author = {Manja Lohse and Marc Hanheide and Britta Wrede and Michael L. Walters and Kheng Lee Koay and Dag Sverre Syrdal and Anders Green and Helge Hüttenrauch and Kerstin Dautenhahn and Gerhard Sagerer and Kerstin Severinson-Eklundh}, title = {{Evaluating extrovert and introvert behaviour of a domestic robot - a video study}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Munich, Germany}, year = {2008}, publisher = {IEEE} } @inproceedings{Otero2008, author = {N. R. Otero and A. Alissandrakis and K. Dautenhahn and C. L. Nehaniv and D. S. Syrdal and K. L. Koay}, title = {{Human to Robot Demonstrations of Routine Home Tasks: Exploring the Role of the Robot's Feedback}}, booktitle = {International Conference on Human-Robot Interaction (HRI)}, address = {Amsterdam, Netherlands}, year = {2008}, publisher = {ACM/IEEE} } @inproceedings{Syrdal2008, author = {D. S. Syrdal and K. Dautenhahn and M. L. Walters and K. L. Koay}, title = {{Sharing Spaces with Robots in a Home Scenario – Anthropomorphic Attributions and their Effect on Proxemic Expectations and Evaluations in a Live HRI Trial}}, booktitle = {AAAI Fall Symposium "AI in Eldercare: New Solutions to Old Problems"}, address = {Washington, DC, USA}, year = {2008}, pages = {116--123} } @inproceedings{Koay2008, author = {Kheng Lee Koay and Dag Sverre Syrdal and Michael L. Walters and Kerstin Dautenhahn}, title = {{Six Weeks in the Robot House - Exploratory Human-Robot Interaction Trials in a Domestic Setting}}, booktitle = {International Conference on Advances in Computer-Human Interactions (ACHI)}, address = {Cancun, Mexico}, year = {2008} } @inproceedings{Koay2007a, author = {Kheng Lee Koay and Dag Sverre Syrdal and Michael L. Walters and Kerstin Dautenhahn}, title = {{Living with Robots: Investigating the Habituation Effect in Participants' Preferences during a Longitudinal Human-Robot Interaction}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Jeju Island, South Korea}, year = {2007}, pages = {564--569}, publisher = {IEEE} } @inproceedings{Syrdal2007a, author = {Dag Sverre Syrdal and Kheng Lee Koay and Mick L. Walters and Kerstin Dautenhahn}, title = {{A Personalized Robot Companion?
- the Role of Individual Differences on Spatial Preferences in HRI Scenarios}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Jeju Island, South Korea}, year = {2007}, pages = {1143--1148}, publisher = {IEEE} } @inproceedings{Walters2007a, author = {M. L. Walters and K. Dautenhahn and R. te Boekhorst and K. L. Koay and S. N. Woods}, title = {{Exploring the Design Space of Robot Appearance and Behavior in an Attention-Seeking 'Living Room' Scenario for a Robot Companion}}, booktitle = {Symposium on Artificial Life}, address = {Honolulu, Hawaii, USA}, year = {2007}, pages = {341--347}, publisher = {IEEE} } @inproceedings{Walters2007b, author = {M. L. Walters and K. Dautenhahn and S. N. Woods and K. L. Koay}, title = {{Robotic Etiquette: Results from User Studies Involving a Fetch and Carry Task}}, booktitle = {International Conference on Human-Robot Interaction (HRI)}, address = {Washington, DC, USA}, year = {2007}, pages = {317--324}, publisher = {ACM/IEEE} } @inproceedings{Syrdal2007b, author = {D. S. Syrdal and M. L. Walters and K. L. Koay and S. N. Woods and K. Dautenhahn}, title = {{Looking Good? Appearance Preferences and Robot Personality Inferences at Zero Acquaintance}}, booktitle = {Spring Symposium: Multidisciplinary Collaboration for Socially Assistive Robotics}, address = {Palo Alto, California}, year = {2007}, pages = {86--92}, publisher = {AAAI} } @inproceedings{Koay2007b, author = {K. L. Koay and E. A. Sisbot and D. S. Syrdal and M. L. Walters and K. Dautenhahn and R. Alami}, title = {{Exploratory Studies of a Robot Approaching a Person in the Context of Handing Over an Object}}, booktitle = {Spring Symposium: Multidisciplinary Collaboration for Socially Assistive Robotics}, address = {Palo Alto, California}, year = {2007}, pages = {18--24}, publisher = {AAAI} } @inproceedings{Walters2007c, author = {M. L. Walters and K. L. Koay and S. N. Woods and D. S. Syrdal and K. Dautenhahn}, title = {{Robot to Human Approaches: Preliminary Results on Comfortable Distances and Preferences}}, booktitle = {Spring Symposium: Multidisciplinary Collaboration for Socially Assistive Robotics}, address = {Palo Alto, California}, year = {2007}, pages = {103--109}, publisher = {AAAI} } @inproceedings{Koay2006a, author = {K. L. Koay and M. L. Walters and S. N. Woods and K. Dautenhahn}, title = {{Empirical Results from Using a Comfort Level Device in Human-Robot Interaction Studies}}, booktitle = {International Conference on Human-Robot Interaction (HRI)}, address = {Salt Lake City, Utah, USA}, year = {2006}, pages = {194--201}, publisher = {ACM} } @inproceedings{Dautenhahn2006, author = {Kerstin Dautenhahn and Sarah Woods and Michael Walters and Kheng Lee Koay and Chrystopher Nehaniv and Emrah Sisbot and Rachid Alami and Thierry Simeon}, title = {{How may I serve you?
A robot companion approaching a seated person in a helping context}}, booktitle = {International Conference on Human-Robot Interaction (HRI)}, address = {Salt Lake City, Utah, USA}, year = {2006}, pages = {172--179}, publisher = {ACM} } @inproceedings{Woods2006, author = {Sarah Naomi Woods and Michael Leonard Walters and Kheng Lee Koay and Kerstin Dautenhahn}, title = {{Methodological Issues in HRI: A Comparison of Live and Video-Based Methods in Robot to Human Approach Direction Trial}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Hatfield, UK}, year = {2006}, pages = {51--58}, publisher = {IEEE} } @inproceedings{Koay2006b, author = {Kheng Lee Koay and Zoran Zivkovic and Kerstin Dautenhahn and Michael Leonard Walters and Nuno Otero and Aris Alissandrakis}, title = {{Methodological Issues of Annotating Vision Sensor Data Using Subjects' Own Judgement of Comfort in a Robot Human Following Experiment}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Hatfield, UK}, year = {2006}, pages = {66--73}, publisher = {IEEE} } @inproceedings{Syrdal2006, author = {Dag Sverre Syrdal and Kerstin Dautenhahn and Sarah Naomi Woods and Michael Leonard Walters and Kheng Lee Koay}, title = {{'Doing the Right Thing Wrong' - Personality and Tolerance to Uncomfortable Robot Approaches}}, booktitle = {International Symposium on Robot and Human Interactive Communication (RO-MAN)}, address = {Hatfield, UK}, year = {2006}, pages = {183--188}, publisher = {IEEE} } @inproceedings{Walters2005a, author = {M. L. Walters and K. Dautenhahn and K. L. Koay and C. Kaouri and R. te Boekhorst and C. L. Nehaniv and I. Werry and D. Lee}, title = {{Close encounters: Spatial distances between people and a robot of mechanistic appearance}}, booktitle = {International Conference on Humanoid Robots (Humanoids)}, address = {Tsukuba, Japan}, year = {2005}, pages = {450--455}, publisher = {IEEE-RAS} } @inproceedings{Woods2005, author = {S. Woods and K. Dautenhahn and C. Kaouri and R. te Boekhorst and K. L. Koay}, title = {{Is this robot like me? Links between human and robot personality traits}}, booktitle = {International Conference on Humanoid Robots (Humanoids)}, address = {Tsukuba, Japan}, year = {2005}, pages = {375--380}, publisher = {IEEE-RAS} } @inproceedings{Sisbot2005, author = {E. A. Sisbot and R. Alami and T. Simeon and K. Dautenhahn and M. Walters and S. Woods and K. L. Koay and C. L. Nehaniv}, title = {{Navigation in the presence of humans}}, booktitle = {International Conference on Humanoid Robots (Humanoids)}, address = {Tsukuba, Japan}, year = {2005}, pages = {181--188}, publisher = {IEEE-RAS} } @inproceedings{Walters2005b, author = {M. Walters and S. Woods and K. L. Koay and K. Dautenhahn}, title = {{Practical and methodological challenges in designing and conducting interaction studies with human subjects}}, booktitle = {AISB Symposium on Robot Companions Hard Problems and Open Challenges in Human-Robot Interaction}, address = {Hatfield, UK}, year = {2005}, pages = {110--120}, publisher = {SSAISB} } @inproceedings{Dautenhahn2005, author = {K. Dautenhahn and S. Woods and C. Kaouri and M. Walters and K. L. Koay and I. Werry}, title = {{What is a Robot Companion - Friend, Assistant or Butler?}}, booktitle = {International Conference on Intelligent Robots and Systems (IROS)}, address = {Edmonton, Alberta, Canada}, year = {2005}, pages = {1488--1493}, publisher = {IEEE/RSJ} } @inproceedings{Boekhorst2005, author = {R.
te Boekhorst and M. Walters and K. L. Koay and K. Dautenhahn and C. Nehaniv}, title = {{A Study of a Single Robot Interacting with Groups of Children in a Rotation Game Scenario}}, booktitle = {International Symposium on Computational Intelligence in Robotics and Automation (CIRA)}, address = {Espoo, Finland}, year = {2005}, pages = {35--40}, publisher = {IEEE} }