@inproceedings{discovery10154593,
          series = {International Conference on New Interfaces for Musical Expression (NIME)},
            year = {2022},
           month = {July},
         address = {Auckland, New Zealand},
            note = {This is an Open Access paper published under a Creative Commons Attribution 4.0 International (CC BY 4.0) Licence (https://creativecommons.org/licenses/by/4.0/).},
          volume = {2022},
       publisher = {University of Auckland / Waipapa Taumata Rau},
           title = {Membrana Neopermeable},
       booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression (NIME 2022)},
          author = {Rhodes, Chris},
             url = {https://nime2022.org/},
         abstract = {Membrana Neopermeable is a composition for physical acoustic guitar, virtual
guitar, mixed reality (MR) and live electronics. It investigates the potential of
recent developments in MR and machine learning (ML) technology to question the
boundaries, and open compositional opportunities, between physical and digital
musical instruments; here, the guitar.
The piece is performed by interacting with both a physical acoustic guitar and a
virtual guitar, the latter 'appearing' in the same physical space as the performer.
The result is an interactive MR compositional experience, made possible by: an
Oculus Quest 2 head-mounted display (HMD; allowing digital overlays, when worn, to
appear in the performer's physical space), Myo armband sensors worn on the arms
(allowing the performer to make custom gestures within MR), the Unity game engine
(hosting the Oculus Quest 2/Passthrough API and the standalone project
application, and providing C\# scripting, physics mechanics, modelling and digital
animation), Max 8 (receiving and processing biometric information from the Myo
armbands and using that data to generate and manipulate sound materials in real
time), and finally Wekinator (facilitating the ML of custom musical gestures by
processing and classifying the performer's biometric information during
performance).
Ultimately, by deconstructing the barriers between physical and virtual
instrument performance and composition, the piece explores how future multimodal
spaces can be used as compositional assets.}
}