
Paulo Bala is a postdoctoral researcher at the Interactive Technologies Institute (ITI), LARSyS, having received a Ph.D. in Digital Media from the Faculdade de Ciências e Tecnologia (FCT) of Universidade Nova de Lisboa (UNL), Lisboa, Portugal, in 2022. He holds a Master of Entertainment Technology (2015) from Carnegie Mellon University (USA) and the University of Madeira (Portugal), and a BSc. (2010) and MSc. (2013) in Informatics Engineering from the University of Madeira. He is a teaching assistant (User Centered Computing; Service Design) at Instituto Superior Técnico (IST/UL), Lisboa, Portugal. He has participated as a research member in EU- and FCT-funded projects, and his main areas of expertise are Human-Computer Interaction, Entertainment Technology, and eXtended Reality. His research has been published in top peer-reviewed venues in Human-Computer Interaction, such as CHI and ISMAR.
Research Interests: Human-Computer Interaction; Virtual Reality; 360º Video; Augmented Reality; Entertainment Technology.
Selected Publications

Bala, Paulo; Sanches, Pedro; Cesário, Vanessa; Leão, Sarah; Rodrigues, Catarina; Nunes, Nuno Jardim; Nisi, Valentina
Towards Critical Heritage in the wild: Analysing Discomfort through Collaborative Autoethnography Inproceedings Forthcoming
In: Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, Hamburg, Germany, Forthcoming.
@inproceedings{bala2023,
title = {Towards Critical Heritage in the wild: Analysing Discomfort through Collaborative Autoethnography},
author = {Paulo Bala and Pedro Sanches and Vanessa Ces\'{a}rio and Sarah Le\~{a}o and Catarina Rodrigues and Nuno Jardim Nunes and Valentina Nisi},
url = {https://doi.org/10.1145/3544548.3581274},
doi = {10.1145/3544548.3581274},
year = {2023},
date = {2023-04-25},
urldate = {2023-04-25},
booktitle = {Proceedings of the 2023 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {Hamburg, Germany},
series = {CHI '23},
abstract = {As we engaged in designing digital interventions for intercultural dialogues around public cultural heritage sites, we saw an opportunity to surface multiple interpretations and points of view of history and shine a critical lens on current societal issues. To do so, we present the results of a collaborative auto-ethnography of alternative tours accompanied by intercultural guides, to explore sensory and embodied engagements with cultural heritage sites in a southern European capital. By focusing on the differences in how we experienced the heritage sites, we analyse the duality of discomfort, a common concept in HCI, in that it can both be deployed as a resource for designing systems that can transform people’s understanding of history or it can be a hindrance for engagement, having an unequal effect on individuals.},
keywords = {},
pubstate = {forthcoming},
tppubtype = {inproceedings}
}

Bala, Paulo; Oakley, Ian; Nisi, Valentina; Nunes, Nuno
Dynamic Field of View Restriction in 360º Video: Aligning Optical Flow and Visual SLAM to Mitigate VIMS Inproceedings
In: Proceedings of the 2021 CHI Conference on Human Factors in Computing Systems, Association for Computing Machinery, New York, NY, USA, 2021.
@inproceedings{BalaCHI2021,
title = {Dynamic Field of View Restriction in 360º Video: Aligning Optical Flow and Visual SLAM to Mitigate VIMS},
author = {Paulo Bala and Ian Oakley and Valentina Nisi and Nuno Nunes},
url = {https://dl.acm.org/citation.cfm?id=3411764.3445499},
doi = {10.1145/3411764.3445499},
year = {2021},
date = {2021-05-08},
booktitle = {Proceedings of the 2021 CHI Conference on Human Factors in Computing Systems},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
abstract = {Head-Mounted Display based Virtual Reality is proliferating. However, Visually Induced Motion Sickness (VIMS), which prevents many from using VR without discomfort, bars widespread adoption. Prior work has shown that limiting the Field of View (FoV) can reduce VIMS at a cost of also reducing presence. Systems that dynamically adjust a user's FoV may be able to balance these concerns. To explore this idea, we present a technique for standard 360º video that shrinks FoVs only during VIMS inducing scenes. It uses Visual Simultaneous Localization and Mapping and peripheral optical flow to compute camera movements and reduces FoV during rapid motion or optical flow. A user study (N=23) comparing 360º video with unrestricted-FoVs (90º), reduced fixed-FoVs (40º) and dynamic-FoVs (40º-90º) revealed that dynamic-FoVs mitigate VIMS while maintaining presence. We close by discussing the user experience of dynamic-FoVs and recommendations for how they can help make VR comfortable and immersive for all.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
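
The dynamic-FoV idea above ties the size of the visible field to how much motion the video currently contains. As a rough illustration only (not the paper's implementation), the Python/OpenCV sketch below estimates dense optical flow per frame of a 360º video and maps the mean flow magnitude to a FoV target between 40º and 90º; the thresholds, smoothing factor and helper names are assumptions made for the example.

import cv2
import numpy as np

# Illustrative only: narrow the FoV target as per-frame optical-flow magnitude
# grows, loosely following the idea of restricting FoV during high-motion scenes.
FOV_MIN, FOV_MAX = 40.0, 90.0      # degrees, matching the study conditions
FLOW_LOW, FLOW_HIGH = 1.0, 8.0     # pixels/frame thresholds (assumed)
SMOOTH = 0.9                       # exponential smoothing factor (assumed)

def target_fov(mean_flow_mag):
    # Linearly shrink the FoV as motion grows between the two thresholds.
    t = np.clip((mean_flow_mag - FLOW_LOW) / (FLOW_HIGH - FLOW_LOW), 0.0, 1.0)
    return FOV_MAX - t * (FOV_MAX - FOV_MIN)

def fov_track(video_path):
    # Returns one smoothed FoV value per frame of the input video.
    cap = cv2.VideoCapture(video_path)
    ok, prev = cap.read()
    prev_gray = cv2.cvtColor(prev, cv2.COLOR_BGR2GRAY)
    fov, fovs = FOV_MAX, []
    while True:
        ok, frame = cap.read()
        if not ok:
            break
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        flow = cv2.calcOpticalFlowFarneback(prev_gray, gray, None,
                                            0.5, 3, 15, 3, 5, 1.2, 0)
        fov = SMOOTH * fov + (1 - SMOOTH) * target_fov(np.linalg.norm(flow, axis=2).mean())
        fovs.append(fov)
        prev_gray = gray
    cap.release()
    return fovs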

Bala, Paulo; Oakley, Ian; Nisi, Valentina; Nunes, Nuno
Staying on Track: a Comparative Study on the Use of Optical Flow in 360º Video to Mitigate VIMS Inproceedings
In: ACM International Conference on Interactive Media Experiences, pp. 82–93, 2020.
@inproceedings{bala2020staying,
title = {Staying on Track: a Comparative Study on the Use of Optical Flow in 360º Video to Mitigate VIMS},
author = {Paulo Bala and Ian Oakley and Valentina Nisi and Nuno Nunes},
url = {https://dl.acm.org/doi/abs/10.1145/3391614.3393658
https://www.youtube.com/watch?v=wTq98BrFs-4},
doi = {10.1145/3391614.3393658},
year = {2020},
date = {2020-01-01},
booktitle = {ACM International Conference on Interactive Media Experiences},
pages = {82--93},
abstract = {Visually Induced Motion Sickness (VIMS), when the visual system detects motion that is not felt by the vestibular system, is a deterrent for first-time Virtual Reality (VR) users and can impact its adoption rate. Constricting the field-of-view (FoV) has been shown to reduce VIMS as it conceals optical flow in peripheral vision, which is more sensitive to motion. Additionally, several studies have suggested the inclusion of visual elements (e.g., grids) consistent with the real world as reference points. In this paper, we describe a novel technique dynamically controlled by a video’s precomputed optical flow and participants’ runtime head direction and evaluate it in a within-subjects study (N = 24) on a 360° video of a roller coaster. Furthermore, based on a detailed analysis of the video and participant’s experience, we provide insights on the effectiveness of the techniques in VIMS reduction and discuss the role of optical flow in the design and evaluation of the study.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
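
In contrast to computing motion at runtime, the technique above pairs the video's precomputed optical flow with the participant's head direction during playback. A minimal sketch of that pairing follows; the sector layout, data shapes and function names are assumptions made for illustration, not details from the paper.

import numpy as np

N_SECTORS = 8  # flow summarized per 45º yaw sector of the equirectangular frame (assumed)

def precompute_sector_flow(flow_fields):
    # flow_fields: list of H x W x 2 arrays (one per frame, e.g. from Farneback).
    # Returns an array of shape (n_frames, N_SECTORS) with the mean flow
    # magnitude per yaw sector, computed once before playback.
    per_frame = []
    for flow in flow_fields:
        mag = np.linalg.norm(flow, axis=2)
        sectors = np.array_split(mag, N_SECTORS, axis=1)  # split along longitude
        per_frame.append([s.mean() for s in sectors])
    return np.array(per_frame)

def flow_at(sector_flow, frame_idx, head_yaw_deg):
    # At runtime, pick the precomputed flow for the sector the viewer is facing.
    sector = int(((head_yaw_deg % 360.0) / 360.0) * N_SECTORS) % N_SECTORS
    return sector_flow[frame_idx, sector]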

Bala, Paulo; Masu, Raul; Nisi, Valentina; Nunes, Nuno
"When the Elephant Trumps": A Comparative Study on Spatial Audio for Orientation in 360º Videos Inproceedings
In: Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems, 2019.
@inproceedings{bala_when_2019,
title = {"When the Elephant Trumps": A Comparative Study on Spatial Audio for Orientation in 360º Videos},
author = {Paulo Bala and Raul Masu and Valentina Nisi and Nuno Nunes},
url = {https://dl.acm.org/citation.cfm?id=3290605.3300925},
doi = {10.1145/3290605.3300925},
year = {2019},
date = {2019-01-01},
booktitle = {Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems},
abstract = {Orientation is an emerging issue in cinematic Virtual Reality (VR), as viewers may fail in locating points of interest. Recent strategies to tackle this research problem have investigated the role of cues, specifically diegetic sound effects. In this paper, we examine the use of sound spatialization for orientation purposes, namely by studying different spatialization conditions ("none", "partial", and "full" spatial manipulation) of multitrack soundtracks. We performed a between-subject mixed-methods study with 36 participants, aided by Cue Control, a tool we developed for dynamic spatial sound editing and data collection/analysis. Based on existing literature on orientation cues in 360º and theories on human listening, we discuss situations in which the spatialization was more effective (namely, "full" spatial manipulation both when using only music and when combining music and diegetic effects), and how this can be used by creators of 360º videos.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
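
As a much-simplified stand-in for the spatialization conditions studied above (and not Cue Control's actual audio engine), the sketch below applies equal-power stereo panning driven by the angle between a sound source and the viewer's current head yaw; all names and parameters are illustrative.

import numpy as np

def pan_gains(source_yaw_deg, head_yaw_deg):
    # Signed angle from the head's forward direction to the source, in [-180, 180),
    # assuming yaw increases clockwise so positive means "to the viewer's right".
    rel = (source_yaw_deg - head_yaw_deg + 180.0) % 360.0 - 180.0
    # Map [-90º, +90º] onto an equal-power pan position in [0, 1].
    pan = np.clip(rel, -90.0, 90.0) / 180.0 + 0.5
    left = np.cos(pan * np.pi / 2.0)
    right = np.sin(pan * np.pi / 2.0)
    return left, right

def spatialize(mono, source_yaw_deg, head_yaw_deg):
    # mono: 1-D array of samples; returns an (n, 2) stereo buffer.
    l, r = pan_gains(source_yaw_deg, head_yaw_deg)
    return np.stack([mono * l, mono * r], axis=1)

With this mapping, a source placed 90º to the viewer's right plays almost entirely on the right channel, nudging the viewer to turn towards it.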

Bala, Paulo; Dionísio, Dina; Nisi, Valentina; Nunes, Nuno
Visually induced motion sickness in 360 videos: comparing and combining visual optimization techniques. Inproceedings
In: IEEE International Symposium on Mixed and Augmented Reality 2018, 2018.
@inproceedings{bala_visually_2018,
title = {Visually induced motion sickness in 360 videos: comparing and combining visual optimization techniques.},
author = {Paulo Bala and Dina Dion\'{i}sio and Valentina Nisi and Nuno Nunes},
url = {https://ieeexplore.ieee.org/document/8699261},
doi = {10.1109/ISMAR-Adjunct.2018.00077},
year = {2018},
date = {2018-01-01},
booktitle = {IEEE International Symposium on Mixed and Augmented Reality 2018},
abstract = {As head mounted displays (HMDs) become everyday consumer items, the potential of immersive Virtual Reality (VR) as a design space becomes ever more promising. However, their usage is impeded by human factors inherent to the technology itself, such as visually induced motion sickness (VIMS), caused by the disconnect between what is visually and physically perceived. Previous work on VIMS reduction has explored techniques targeting HMDs, while others explored techniques that target the multimedia content itself through visual optimization. The latter are often studied individually and cannot be applied to certain VR content such as 360° video. Consequently, this paper describes an exploratory study comparing and combining such techniques (independent visual background and restricted field of view) in 360° video. The work provides constructive insights for VR designers, while also exploring how analytics of VR content and user experience can be used for VIMS prevention and evaluation.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
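
One of the two techniques compared above, the restricted field of view, can be pictured as a vignette drawn over the rendered viewport. The sketch below is only an approximation for illustration; the radius mapping and the assumed 90º viewport are not values from the paper.

import numpy as np

def apply_fov_mask(frame, fov_deg, viewport_fov_deg=90.0):
    # frame: H x W x 3 uint8 viewport image; black out pixels outside the
    # restricted FoV, approximated as a circle in viewport space.
    h, w = frame.shape[:2]
    yy, xx = np.mgrid[0:h, 0:w]
    cx, cy = w / 2.0, h / 2.0
    radius = (fov_deg / viewport_fov_deg) * min(cx, cy)
    mask = (xx - cx) ** 2 + (yy - cy) ** 2 <= radius ** 2
    out = frame.copy()
    out[~mask] = 0
    return out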

Bala, Paulo; Masu, Raul; Nisi, Valentina; Nunes, Nuno
Cue Control: Interactive Sound Spatialization for 360º Videos Inproceedings
In: International Conference on Interactive Digital Storytelling, pp. 333–337, Springer, Cham, 2018.
@inproceedings{bala_cue_2018,
title = {Cue Control: Interactive Sound Spatialization for 360º Videos},
author = {Paulo Bala and Raul Masu and Valentina Nisi and Nuno Nunes},
url = {https://link.springer.com/chapter/10.1007%2F978-3-030-04028-4_36},
doi = {10.1007/978-3-030-04028-4_36},
year = {2018},
date = {2018-01-01},
booktitle = {International Conference on Interactive Digital Storytelling},
pages = {333--337},
publisher = {Springer, Cham},
abstract = {In the 360º videos, the role of sound became crucial as it not only contributes to the participant’s level of Presence (the feeling of being in the virtual environment) but can also provide viewers with a periodical awareness of their surroundings; therefore, audio can guide user attention toward desired points. In this sense, the sonic elements of a 360º video assume an interactive role, as sounds become notifying elements or icons. In the paper, we describe Cue Control, an audio editor that facilitates the creation of soundtracks for 360º videos. The user can control the location of the sonic elements by positioning the sounds in the virtual 3D space following the desired timeline; Cue Control automatically creates a cue list of the spatial soundtrack events for playback. The software also allows for different interactive modalities of playback, adapting the cue list to the viewpoint of the user. We conducted a small pilot study where Cue Control was used to assemble the soundtrack of two 360º videos. According to the data gathered, we present some preliminary reflections about the use of sound to guide users’ attention in 360º videos towards points of interest.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}

Bala, Paulo; Dionisio, Mara; Nisi, Valentina; Nunes, Nuno
IVRUX: A tool for analyzing immersive narratives in virtual reality Inproceedings
In: International Conference on Interactive Digital Storytelling, pp. 3–11, Springer, Cham, 2016.
@inproceedings{bala_ivrux:_2016,
title = {IVRUX: A tool for analyzing immersive narratives in virtual reality},
author = {Paulo Bala and Mara Dionisio and Valentina Nisi and Nuno Nunes},
url = {https://link.springer.com/chapter/10.1007/978-3-319-48279-8_1},
doi = {10.1007/978-3-319-48279-8_1},
year = {2016},
date = {2016-01-01},
booktitle = {International Conference on Interactive Digital Storytelling},
pages = {3--11},
publisher = {Springer, Cham},
abstract = {This paper describes IVRUX, a tool for the analysis of 360º Immersive Virtual Reality (IVR) story-driven experiences. Traditional cinema offers an immersive experience through surround sound technology and high definition screens. However, in 360º IVR the audience is in the middle of the action, everything is happening around them. The immersiveness and freedom of choice brings new challenges into narrative creation, hence the need for a tool to help the process of evaluating user experience. Starting from “The Old Pharmacy”, a 360º Virtual Reality scene, we developed IVRUX, a tool that records users’ experience while visualizing the narrative. In this way, we are able to reconstruct the user’s experience and understand where their attention is focused. In this paper, we present results from a study done using 32 participants and, through analyzing the results, provide insights that help creators to understand how to enhance 360º Immersive Virtual Reality story driven experiences.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
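
The kind of analysis IVRUX supports, reconstructing where attention was focused, can be approximated from a log of head yaw samples. The sketch below assumes a hypothetical POI list, sampling rate and viewing-cone width; none of these come from the paper.

import numpy as np

POIS = {"pharmacist": 0.0, "counter": 120.0, "doorway": -130.0}  # POI yaw in degrees (assumed)
CONE_HALF_ANGLE = 45.0   # half of an assumed ~90º horizontal field of view

def angular_diff(a, b):
    # Smallest absolute difference between two yaw angles, in degrees.
    return abs((a - b + 180.0) % 360.0 - 180.0)

def attention_seconds(head_yaws, sample_dt=0.1):
    # head_yaws: sequence of logged yaw samples (degrees); sample_dt: seconds per sample.
    # Tallies how long each POI stayed inside the viewer's cone of attention.
    totals = {name: 0.0 for name in POIS}
    for yaw in head_yaws:
        for name, poi_yaw in POIS.items():
            if angular_diff(yaw, poi_yaw) <= CONE_HALF_ANGLE:
                totals[name] += sample_dt
    return totals
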
Selected Projects
Demo Reel
Last demo reel update: 2015