Publications
This page shows refereed journal articles, conference articles, and book chapters.
2024
-
Minsik Choi, Josh Andres, and Charles Patrick Martin. 2024. Tonal Cognition in Sonification: Exploring the Needs of Practitioners in Sonic Interaction Design. Proceedings of the 19th International Audio Mostly Conference: Explorations in Sonic Cultures, Association for Computing Machinery, pp. 218–228. http://doi.org/10.1145/3678299.3678321
DOI URL PDF BibTex
@inproceedings{Choi:2024,
  author    = {Choi, Minsik and Andres, Josh and Martin, Charles Patrick},
  title     = {Tonal Cognition in Sonification: Exploring the Needs of Practitioners in Sonic Interaction Design},
  booktitle = {Proceedings of the 19th International Audio Mostly Conference: Explorations in Sonic Cultures},
  series    = {AM '24},
  pages     = {218--228},
  numpages  = {11},
  year      = {2024},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  location  = {Milan, Italy},
  isbn      = {9798400709685},
  doi       = {10.1145/3678299.3678321},
  url       = {https://doi.org/10.1145/3678299.3678321},
  pdf       = {https://dl.acm.org/doi/pdf/10.1145/3678299.3678321},
  keywords  = {conference-paper, refereed},
}
-
Xinlei Niu, Jing Zhang, and Charles Patrick Martin. 2024. HybridVC: Efficient Voice Style Conversion with Text and Audio Prompts. Interspeech 2024, pp. 4368–4372. http://doi.org/10.21437/Interspeech.2024-46
DOI URL PDF BibTex
@inproceedings{Niu:2024ac,
  author    = {Niu, Xinlei and Zhang, Jing and Martin, Charles Patrick},
  title     = {{HybridVC}: Efficient Voice Style Conversion with Text and Audio Prompts},
  booktitle = {Interspeech 2024},
  pages     = {4368--4372},
  year      = {2024},
  issn      = {2958-1796},
  doi       = {10.21437/Interspeech.2024-46},
  url       = {https://www.isca-archive.org/interspeech_2024/niu24_interspeech.html},
  pdf       = {https://www.isca-archive.org/interspeech_2024/niu24_interspeech.pdf},
  keywords  = {conference-paper, refereed},
}
-
Xinlei Niu, Christian Walder, Jing Zhang, and Charles Patrick Martin. 2024. Latent Optimal Paths by Gumbel Propagation for Variational Bayesian Dynamic Programming. Proceedings of the 41st International Conference on Machine Learning, PMLR, pp. 38316–38343.
URL PDF BibTex
@inproceedings{Niu:2024ab,
  author    = {Niu, Xinlei and Walder, Christian and Zhang, Jing and Martin, Charles Patrick},
  title     = {Latent Optimal Paths by {G}umbel Propagation for Variational {B}ayesian Dynamic Programming},
  booktitle = {Proceedings of the 41st International Conference on Machine Learning},
  editor    = {Salakhutdinov, Ruslan and Kolter, Zico and Heller, Katherine and Weller, Adrian and Oliver, Nuria and Scarlett, Jonathan and Berkenkamp, Felix},
  series    = {Proceedings of Machine Learning Research},
  volume    = {235},
  pages     = {38316--38343},
  month     = {21--27 Jul},
  year      = {2024},
  publisher = {PMLR},
  url       = {https://proceedings.mlr.press/v235/niu24b.html},
  pdf       = {https://raw.githubusercontent.com/mlresearch/v235/main/assets/niu24b/niu24b.pdf},
  keywords  = {conference-paper, refereed},
}
-
Xinlei Niu, Jing Zhang, Christian Walder, and Charles Patrick Martin. 2024. SoundLoCD: An Efficient Conditional Discrete Contrastive Latent Diffusion Model for Text-to-Sound Generation. ICASSP 2024 - 2024 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 261–265. http://doi.org/10.1109/ICASSP48485.2024.10446349
DOI BibTex
@inproceedings{Niu:2024aa,
  author        = {Niu, Xinlei and Zhang, Jing and Walder, Christian and Martin, Charles Patrick},
  title         = {{SoundLoCD}: An Efficient Conditional Discrete Contrastive Latent Diffusion Model for Text-to-Sound Generation},
  booktitle     = {ICASSP 2024 - 2024 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  pages         = {261--265},
  year          = {2024},
  doi           = {10.1109/ICASSP48485.2024.10446349},
  keywords      = {conference-paper, refereed},
  date-added    = {2024-04-12 16:16:53 +1000},
  date-modified = {2024-04-12 16:17:08 +1000},
  bdsk-url-1    = {https://doi.org/10.1109/ICASSP48485.2024.10446349},
}
-
Benedikte Wallace, Kristian Nymoen, Jim Torresen, and Charles Patrick Martin. 2024. Breaking from realism: exploring the potential of glitch in AI-generated dance. Digital Creativity 35, 2, 125–142. http://doi.org/10.1080/14626268.2024.2327006
DOI BibTex
@article{Wallace:2024aa,
  author     = {Wallace, Benedikte and Nymoen, Kristian and Torresen, Jim and Martin, Charles Patrick},
  title      = {Breaking from realism: exploring the potential of glitch in {AI-generated} dance},
  journal    = {Digital Creativity},
  volume     = {35},
  number     = {2},
  pages      = {125--142},
  year       = {2024},
  publisher  = {Routledge},
  doi        = {10.1080/14626268.2024.2327006},
  keywords   = {refereed, journal-article},
  bdsk-url-1 = {https://doi.org/10.1080/14626268.2024.2327006},
}
2023
-
Benedikte Wallace, Clarice Hilton, Kristian Nymoen, Jim Torresen, Charles Patrick Martin, and Rebecca Fiebrink. 2023. Embodying an Interactive AI for Dance Through Movement Ideation. Proceedings of the 15th Conference on Creativity and Cognition, Association for Computing Machinery, pp. 454–464. http://doi.org/10.1145/3591196.3593336
Abstract DOI URL BibTexWhat expectations exist in the minds of dancers when interacting with a generative machine learning model? During two workshop events, experienced dancers explore these expectations through improvisation and role-play, embodying an imagined AI-dancer. The dancers explored how intuited flow, shared images, and the concept of a human replica might work in their imagined AI-human interaction. Our findings challenge existing assumptions about what is desired from generative models of dance, such as expectations of realism, and how such systems should be evaluated. We further advocate that such models should celebrate non-human artefacts, focus on the potential for serendipitous moments of discovery, and that dance practitioners should be included in their development. Our concrete suggestions show how our findings can be adapted into the development of improved generative and interactive machine learning models for dancers’ creative practice.
@inproceedings{Wallace:2023aa,
  author        = {Wallace, Benedikte and Hilton, Clarice and Nymoen, Kristian and Torresen, Jim and Martin, Charles Patrick and Fiebrink, Rebecca},
  title         = {Embodying an Interactive AI for Dance Through Movement Ideation},
  booktitle     = {Proceedings of the 15th Conference on Creativity and Cognition},
  series        = {C\&C '23},
  pages         = {454--464},
  numpages      = {11},
  year          = {2023},
  publisher     = {Association for Computing Machinery},
  address       = {New York, NY, USA},
  isbn          = {9798400701801},
  doi           = {10.1145/3591196.3593336},
  url           = {https://doi.org/10.1145/3591196.3593336},
  keywords      = {conference-paper, refereed},
  date-added    = {2024-04-12 16:17:46 +1000},
  date-modified = {2024-04-12 16:18:00 +1000},
  bdsk-url-1    = {https://doi.org/10.1145/3591196.3593336},
}
-
Yichen Wang, Mingze Xi, Matt Adcock, and Charles Patrick Martin. 2023. Mobility, Space and Sound Activate Expressive Musical Experience in Augmented Reality. Proceedings of the International Conference on New Interfaces for Musical Expression, pp. 128–133.
Abstract URL BibTexWe present a study of a freehand musical system to investigate musicians’ experiences related to performance in augmented reality (AR). Head-mounted mixed reality computers present opportunities for natural gestural control in three dimensions, particularly when using hand-tracking in a creative interface. Existing musical interfaces with head-mounted displays use dedicated input devices that are not designed specifically for musical gestures and may not support appropriate interactions. We are yet to see widespread adoption of head-mounted AR musical instruments. We conducted an empirical study to evaluate musicians’ (N=20) experience of performing with a freehand musical interface. The results suggest that the design of freehand musical interaction in the AR space is highly learnable and explorable, and that such systems can leverage unique aspects of mobility, space and sound to deliver an engaging and playful musical experience. The mobile musical experience with a spatial interface design allowed performers to be more bodily engaged and facilitated gestural exploration for musical creativity. This work contributes to a more developed understanding of potentials and challenges in AR-based interface design for musical creativity.
@inproceedings{Wang:2023aa,
  author        = {Wang, Yichen and Xi, Mingze and Adcock, Matt and Martin, Charles Patrick},
  title         = {Mobility, Space and Sound Activate Expressive Musical Experience in Augmented Reality},
  booktitle     = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  editor        = {Ortiz, Miguel and Marquez-Borbon, Adnan},
  address       = {Mexico City, Mexico},
  articleno     = {17},
  pages         = {128--133},
  numpages      = {6},
  month         = may,
  year          = {2023},
  issn          = {2220-4806},
  track         = {Papers},
  url           = {http://nime.org/proceedings/2023/nime2023_17.pdf},
  keywords      = {refereed, conference-paper},
  date-added    = {2024-04-12 16:15:52 +1000},
  date-modified = {2024-04-12 16:16:17 +1000},
  bdsk-url-1    = {http://nime.org/proceedings/2023/nime2023_17.pdf},
}
-
Charles Patrick Martin, Alexander Hunter, Brent Schuetze, and Yichen Wang. 2023. Composing Interface Connections for a Networked Touchscreen Ensemble. Proceedings of the 4th International Symposium on the Internet of Sounds, IEEE, pp. 1–5. http://doi.org/10.1109/IEEECONF59510.2023.10335226
DOI Preprint PDF BibTex
@inproceedings{Martin:2023ab,
  author        = {Martin, Charles Patrick and Hunter, Alexander and Schuetze, Brent and Wang, Yichen},
  title         = {Composing Interface Connections for a Networked Touchscreen Ensemble},
  booktitle     = {Proceedings of the 4th International Symposium on the Internet of Sounds},
  series        = {IS2 '23},
  pages         = {1--5},
  month         = oct,
  year          = {2023},
  publisher     = {{IEEE}},
  address       = {Pisa, Italy},
  doi           = {10.1109/IEEECONF59510.2023.10335226},
  preprint      = {https://metatonetransfer.com/preprints/2023-composing-interface-connections.pdf},
  keywords      = {refereed, conference-paper, collaboration, music},
  date-added    = {2024-04-12 16:14:37 +1000},
  date-modified = {2024-04-12 16:14:56 +1000},
  bdsk-url-1    = {https://doi.org/10.1109/IEEECONF59510.2023.10335226},
}
2022
-
Charles Patrick Martin. 2022. Performing with a Generative Electronic Music Controller. Joint Proceedings of the ACM IUI Workshops 2022.
URL Preprint PDF BibTex
@inproceedings{Martin:2022vf,
  author        = {Martin, Charles Patrick},
  title         = {Performing with a Generative Electronic Music Controller},
  booktitle     = {Joint Proceedings of the ACM IUI Workshops 2022},
  month         = mar,
  year          = {2022},
  url           = {https://ceur-ws.org/Vol-3124/paper10.pdf},
  preprint      = {https://hai-gen.github.io/2022/papers/paper-HAIGEN-MartinCharles.pdf},
  keywords      = {conference-paper, refereed},
  date-added    = {2022-05-10 10:55:30 +1000},
  date-modified = {2024-04-12 16:37:52 +1000},
  bdsk-url-1    = {https://hai-gen.github.io/2022/papers/paper-HAIGEN-MartinCharles.pdf},
}
-
Yichen Wang and Charles Martin. 2022. Designing Sound Synthesis Interfaces for Head-mounted Augmented Reality. 2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 351–353. http://doi.org/10.1109/VRW55335.2022.00078
DOI BibTex
@inproceedings{Wang:2022uh,
  author        = {Wang, Yichen and Martin, Charles},
  title         = {Designing Sound Synthesis Interfaces for Head-mounted Augmented Reality},
  booktitle     = {2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
  pages         = {351--353},
  year          = {2022},
  doi           = {10.1109/VRW55335.2022.00078},
  keywords      = {conference-paper, refereed},
  date-added    = {2022-05-10 10:54:54 +1000},
  date-modified = {2022-05-10 10:55:02 +1000},
  bdsk-url-1    = {https://doi.org/10.1109/VRW55335.2022.00078},
}
-
Yichen Wang, Henry Gardner, Charles Martin, and Matt Adcock. 2022. Augmenting Sculpture with Immersive Sonification. 2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), pp. 626–627. http://doi.org/10.1109/VRW55335.2022.00164
DOI BibTex
@inproceedings{Wang:2022um,
  author        = {Wang, Yichen and Gardner, Henry and Martin, Charles and Adcock, Matt},
  title         = {Augmenting Sculpture with Immersive Sonification},
  booktitle     = {2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
  pages         = {626--627},
  year          = {2022},
  doi           = {10.1109/VRW55335.2022.00164},
  keywords      = {conference-paper, refereed},
  date-added    = {2022-05-10 10:54:15 +1000},
  date-modified = {2022-05-10 10:54:34 +1000},
  bdsk-url-1    = {https://doi.org/10.1109/VRW55335.2022.00164},
}
-
Michael Muller, Lydia B Chilton, Anna Kantosalo, Charles Patrick Martin, and Greg Walsh. 2022. GenAICHI: Generative AI and HCI. CHI Conference on Human Factors in Computing Systems Extended Abstracts, Association for Computing Machinery. http://doi.org/10.1145/3491101.3503719
Abstract DOI BibTexThis workshop applies human centered themes to a new and powerful technology, generative artificial intelligence (AI). Unlike AI systems that produce decisions or descriptions, generative AI systems can produce new and creative content that can include images, texts, music, video, and other forms of design. The results are often similar to results produced by humans. However, it is not yet clear how humans make sense of generative AI algorithms or their outcomes. It is also not yet clear how humans can control and more generally, interact with, these powerful capabilities. Finally, it is not clear what kinds of collaboration patterns will emerge when creative humans and creative technologies work together. It is time to convene the interdisciplinary research domain of generative AI and HCI. Participation in this invitational workshop is open to seasoned scholars and early career researchers. We solicit descriptions of completed projects, works-in-progress, and provocations. Together we will develop theories and practices in this intriguing new domain.
@inproceedings{Muller:2022ul,
  author        = {Muller, Michael and Chilton, Lydia B and Kantosalo, Anna and Martin, Charles Patrick and Walsh, Greg},
  title         = {{GenAICHI}: Generative {AI} and {HCI}},
  booktitle     = {CHI Conference on Human Factors in Computing Systems Extended Abstracts},
  series        = {CHI EA '22},
  articleno     = {110},
  numpages      = {7},
  year          = {2022},
  publisher     = {Association for Computing Machinery},
  address       = {New York, NY, USA},
  location      = {New Orleans, LA, USA},
  isbn          = {9781450391566},
  doi           = {10.1145/3491101.3503719},
  keywords      = {conference-paper, refereed},
  date-added    = {2022-05-10 10:52:30 +1000},
  date-modified = {2022-05-10 10:53:12 +1000},
  bdsk-url-1    = {https://doi.org/10.1145/3491101.3503719},
}
2021
-
Benedikte Wallace, Charles Patrick Martin, Jim Tørresen, and Kristian Nymoen. 2021. Exploring the Effect of Sampling Strategy on Movement Generation with Generative Neural Networks. Artificial Intelligence in Music, Sound, Art and Design: 10th International Conference, EvoMUSART 2021, Springer International Publishing, pp. 344–359. http://doi.org/10.1007/978-3-030-72914-1_23
DOI BibTex
@inproceedings{Wallace:2021vh,
  author        = {Wallace, Benedikte and Martin, Charles Patrick and T{\o}rresen, Jim and Nymoen, Kristian},
  title         = {Exploring the Effect of Sampling Strategy on Movement Generation with Generative Neural Networks},
  booktitle     = {Artificial Intelligence in Music, Sound, Art and Design: 10th International Conference, EvoMUSART 2021},
  pages         = {344--359},
  year          = {2021},
  publisher     = {Springer International Publishing},
  doi           = {10.1007/978-3-030-72914-1_23},
  keywords      = {conference-paper, refereed},
  date-added    = {2021-04-30 22:07:07 +1000},
  date-modified = {2021-04-30 22:09:01 +1000},
  bdsk-url-1    = {https://doi.org/10.1007/978-3-030-72914-1_23},
}
-
Robert Neil McArthur and Charles Patrick Martin. 2021. An Application for Evolutionary Music Composition Using Autoencoders. Artificial Intelligence in Music, Sound, Art and Design: 10th International Conference, EvoMUSART 2021, Springer International Publishing, pp. 443–458. http://doi.org/10.1007/978-3-030-72914-1_29
DOI BibTex
@inproceedings{McArthur:2021vj,
  author        = {McArthur, Robert Neil and Martin, Charles Patrick},
  title         = {An Application for Evolutionary Music Composition Using Autoencoders},
  booktitle     = {Artificial Intelligence in Music, Sound, Art and Design: 10th International Conference, EvoMUSART 2021},
  pages         = {443--458},
  year          = {2021},
  publisher     = {Springer International Publishing},
  doi           = {10.1007/978-3-030-72914-1_29},
  keywords      = {conference-paper, refereed},
  date-added    = {2021-04-30 22:05:45 +1000},
  date-modified = {2021-04-30 22:10:02 +1000},
  bdsk-url-1    = {https://doi.org/10.1007/978-3-030-72914-1_29},
}
-
Tønnes F. Nygaard, Charles P. Martin, David Howard, Jim Torresen, and Kyrre Glette. 2021. Environmental Adaptation of Robot Morphology and Control through Real-world Evolution. Evolutionary Computation. http://doi.org/10.1162/evco_a_00291
DOI URL arXiv BibTex
@article{Nygaard:2021wy,
  author        = {Nygaard, T{\o}nnes F. and Martin, Charles P. and Howard, David and {Torresen}, Jim and Glette, Kyrre},
  title         = {Environmental Adaptation of Robot Morphology and Control through Real-world Evolution},
  journal       = {Evolutionary Computation},
  year          = {2021},
  doi           = {10.1162/evco_a_00291},
  archiveprefix = {arXiv},
  eprint        = {2003.13254},
  url           = {https://arxiv.org/abs/2003.13254},
  keywords      = {journal-article, refereed},
  date-added    = {2021-03-19 17:43:33 +1100},
  date-modified = {2021-04-01 21:39:28 +1100},
  bdsk-url-1    = {https://arxiv.org/abs/2003.13254},
  bdsk-url-2    = {https://doi.org/10.1162/evco_a_00291},
}
-
Tønnes F. Nygaard, Charles P. Martin, Jim Torresen, Kyrre Glette, and David Howard. 2021. Real-world embodied AI through a morphologically adaptive quadruped robot. Nature Machine Intelligence. http://doi.org/10.1038/s42256-021-00320-3
DOI URL Video BibTex
@article{Nygaard:2021uj,
  author        = {Nygaard, T{\o}nnes F. and Martin, Charles P. and Torresen, Jim and Glette, Kyrre and Howard, David},
  title         = {Real-world embodied {AI} through a morphologically adaptive quadruped robot},
  journal       = {Nature Machine Intelligence},
  year          = {2021},
  doi           = {10.1038/s42256-021-00320-3},
  url           = {https://www.nature.com/articles/s42256-021-00320-3},
  video         = {https://youtu.be/8ZSP_KzrP_w},
  keywords      = {journal-article, refereed},
  date-added    = {2021-03-19 17:19:42 +1100},
  date-modified = {2021-03-19 17:22:07 +1100},
  bdsk-url-1    = {https://www.nature.com/articles/s42256-021-00320-3},
  bdsk-url-2    = {https://doi.org/10.1038/s42256-021-00320-3},
}
2020
-
Sichao Li and Charles Patrick Martin. 2020. Comparing Three Data Representations for Music with a Sequence-to-Sequence Model. Australasian Joint Conference on Artificial Intelligence, pp. 16–28. http://doi.org/10.1007/978-3-030-64984-5_2
DOI BibTex
@inproceedings{Li:2020aa,
  author        = {Li, Sichao and Martin, Charles Patrick},
  title         = {Comparing Three Data Representations for Music with a Sequence-to-Sequence Model},
  booktitle     = {Australasian Joint Conference on Artificial Intelligence},
  series        = {Lecture Notes in Computer Science},
  volume        = {12576},
  pages         = {16--28},
  year          = {2020},
  doi           = {10.1007/978-3-030-64984-5_2},
  keywords      = {conference-paper, refereed},
  date-added    = {2020-12-09 15:56:58 +1100},
  date-modified = {2020-12-09 15:58:59 +1100},
  bdsk-url-1    = {https://doi.org/10.1007/978-3-030-64984-5_2},
}
-
Benedikte Wallace, Charles Patrick Martin, Jim Torresen, and Kristian Nymoen. 2020. Towards Movement Generation with Audio Features. In Proceedings of the 11th International Conference on Computational Creativity. Association for Computational Creativity, 284–287.
URL arXiv BibTex
@inproceedings{Wallace:2020aa,
  author        = {Wallace, Benedikte and Martin, Charles Patrick and Torresen, Jim and Nymoen, Kristian},
  title         = {Towards Movement Generation with Audio Features},
  booktitle     = {Proceedings of the 11th International Conference on Computational Creativity},
  pages         = {284--287},
  year          = {2020},
  publisher     = {Association for Computational Creativity},
  archiveprefix = {arXiv},
  eprint        = {2011.13453},
  url           = {https://computationalcreativity.net/iccc20/papers/125-iccc20.pdf},
  keywords      = {conference-paper, refereed},
  date-added    = {2020-12-09 15:49:49 +1100},
  date-modified = {2020-12-09 15:54:34 +1100},
}
-
Cagri Erdem, Qichao Lan, Julian Fuhrer, Charles Patrick Martin, Jim Torresen, and Alexander Refsum Jensenius. 2020. Towards Playing in the ‘Air’: Modeling Motion-Sound Energy Relationships in Electric Guitar Performance Using Deep Neural Networks. In Proceedings of the 17th Sound and Music Computing Conference. SMC Network, 177–184.
URL Preprint PDF BibTex
@inproceedings{Erdem:2020aa,
  author        = {Erdem, Cagri and Lan, Qichao and Fuhrer, Julian and Martin, Charles Patrick and Torresen, Jim and Jensenius, Alexander Refsum},
  title         = {Towards Playing in the `Air': Modeling Motion-Sound Energy Relationships in Electric Guitar Performance Using Deep Neural Networks},
  booktitle     = {Proceedings of the 17th Sound and Music Computing Conference},
  pages         = {177--184},
  year          = {2020},
  publisher     = {SMC Network},
  url           = {http://urn.nb.no/URN:NBN:no-82501},
  preprint      = {http://urn.nb.no/URN:NBN:no-82501},
  keywords      = {conference-paper, refereed, bestpaper},
  date-added    = {2020-12-09 15:38:00 +1100},
  date-modified = {2020-12-09 17:03:07 +1100},
  bdsk-url-1    = {http://urn.nb.no/URN:NBN:no-82501},
}
-
Rohan Proctor and Charles Patrick Martin. 2020. A Laptop Ensemble Performance System using Recurrent Neural Networks. Proceedings of the International Conference on New Interfaces for Musical Expression, Birmingham City University, pp. 43–48. http://doi.org/10.5281/zenodo.4813481
Abstract DOI URL arXiv Preprint PDF BibTexThe popularity of applying machine learning techniques in musical domains has created an inherent availability of freely accessible pre-trained neural network (NN) models ready for use in creative applications. This work outlines the implementation of one such application in the form of an assistance tool designed for live improvisational performances by laptop ensembles. The primary intention was to leverage off-the-shelf pre-trained NN models as a basis for assisting individual performers either as musical novices looking to engage with more experienced performers or as a tool to expand musical possibilities through new forms of creative expression. The system expands upon a variety of ideas found in different research areas including new interfaces for musical expression, generative music and group performance to produce a networked performance solution served via a web-browser interface. The final implementation of the system offers performers a mixture of high and low-level controls to influence the shape of sequences of notes output by locally run NN models in real time, also allowing performers to define their level of engagement with the assisting generative models. Two test performances were played, with the system shown to feasibly support four performers over a four minute piece while producing musically cohesive and engaging music. Iterations on the design of the system exposed technical constraints on the use of a JavaScript environment for generative models in a live music context, largely derived from inescapable processing overheads.
@inproceedings{NIME20_9,
  author        = {Proctor, Rohan and Martin, Charles Patrick},
  title         = {A Laptop Ensemble Performance System using Recurrent Neural Networks},
  booktitle     = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  editor        = {Michon, Romain and Schroeder, Franziska},
  address       = {Birmingham, UK},
  pages         = {43--48},
  month         = jul,
  year          = {2020},
  publisher     = {Birmingham City University},
  issn          = {2220-4806},
  doi           = {10.5281/zenodo.4813481},
  archiveprefix = {arXiv},
  eprint        = {2012.02322},
  url           = {https://www.nime.org/proceedings/2020/nime2020_paper9.pdf},
  preprint      = {https://metatonetransfer.com/preprints/2020-NIME-LaptopEnsembleRNN.pdf},
  keywords      = {conference-paper, refereed},
  date-added    = {2020-07-25 13:29:43 +1000},
  date-modified = {2022-02-04 15:10:14 +1100},
  bdsk-url-1    = {https://www.nime.org/proceedings/2020/nime2020_paper9.pdf},
}
-
Charles Patrick Martin, Zeruo Liu, Yichen Wang, Wennan He, and Henry Gardner. 2020. Sonic Sculpture: Activating Engagement with Head-Mounted Augmented Reality. Proceedings of the International Conference on New Interfaces for Musical Expression, Birmingham City University, pp. 39–42. http://doi.org/10.5281/zenodo.4813445
Abstract DOI URL arXiv Preprint PDF BibTexWe describe a sonic artwork, "Listening To Listening", that has been designed to accompany a real-world sculpture with two prototype interaction schemes. Our artwork is created for the HoloLens platform so that users can have an individual experience in a mixed reality context. Personal AR systems have recently become available and practical for integration into public art projects, however research into sonic sculpture works has yet to account for the affordances of current portable and mainstream AR systems. In this work, we take advantage of the HoloLens’ spatial awareness to build sonic spaces that have a precise spatial relationship to a given sculpture and where the sculpture itself is modelled in the augmented scene as an "invisible hologram". We describe the artistic rationale for our artwork, the design of the two interaction schemes, and the technical and usability feedback that we have obtained from demonstrations during iterative development. This work appears to be the first time that head-mounted AR has been used to build an interactive sonic landscape to engage with a public sculpture.
@inproceedings{NIME20_8,
  author        = {Martin, Charles Patrick and Liu, Zeruo and Wang, Yichen and He, Wennan and Gardner, Henry},
  title         = {Sonic Sculpture: Activating Engagement with Head-Mounted Augmented Reality},
  booktitle     = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  editor        = {Michon, Romain and Schroeder, Franziska},
  address       = {Birmingham, UK},
  pages         = {39--42},
  month         = jul,
  year          = {2020},
  publisher     = {Birmingham City University},
  issn          = {2220-4806},
  doi           = {10.5281/zenodo.4813445},
  archiveprefix = {arXiv},
  eprint        = {2012.02311},
  url           = {https://www.nime.org/proceedings/2020/nime2020_paper8.pdf},
  preprint      = {https://metatonetransfer.com/preprints/2020-NIME-SonicSculpture.pdf},
  keywords      = {conference-paper, refereed},
  date-added    = {2020-07-25 13:28:36 +1000},
  date-modified = {2022-02-04 15:11:25 +1100},
  bdsk-url-1    = {https://www.nime.org/proceedings/2020/nime2020_paper8.pdf},
}
-
Charles Patrick Martin, Kyrre Glette, Tønnes Frostad Nygaard, and Jim Torresen. 2020. Understanding Musical Predictions with an Embodied Interface for Musical Machine Learning. Frontiers in Artificial Intelligence 3, 6. http://doi.org/10.3389/frai.2020.00006
Abstract DOI URL Preprint PDF Video BibTexMachine-learning models of music often exist outside the worlds of musical performance practice and abstracted from the physical gestures of musicians. In this work, we consider how a recurrent neural network (RNN) model of simple music gestures may be integrated into a physical instrument so that predictions are sonically and physically entwined with the performer’s actions. We introduce EMPI, an embodied musical prediction interface that simplifies musical interaction and prediction to just one dimension of continuous input and output. The predictive model is a mixture density RNN trained to estimate the performer’s next physical input action and the time at which this will occur. Predictions are represented sonically, through synthesised audio, and physically, with a motorised output indicator. We use EMPI to investigate how performers understand and exploit different predictive models to make music through a controlled study of performances with different models and levels of physical feedback. We show that while performers often favour a model trained on human-sourced data, they find different musical affordances in models trained on synthetic, and even random, data. Physical representation of predictions seemed to affect the length of performances. This work contributes new understandings of how musicians use generative ML models in real-time performance backed up by experimental evidence. We argue that a constrained musical interface can expose the affordances of embodied predictive interactions.
@article{Martin:2020aa,
  author        = {Martin, Charles Patrick and Glette, Kyrre and Nygaard, T{\o}nnes Frostad and Torresen, Jim},
  title         = {Understanding Musical Predictions with an Embodied Interface for Musical Machine Learning},
  journal       = {Frontiers in Artificial Intelligence},
  volume        = {3},
  pages         = {6},
  month         = feb,
  year          = {2020},
  doi           = {10.3389/frai.2020.00006},
  url           = {https://www.frontiersin.org/articles/10.3389/frai.2020.00006},
  preprint      = {https://metatonetransfer.com/preprints/2020-understanding-musical-predictions-with-EMPI.pdf},
  video         = {https://youtu.be/tvgqxmHr9wU},
  keywords      = {journal-article, refereed},
  date-added    = {2020-02-10 09:42:30 +1100},
  date-modified = {2020-12-04 15:21:58 +1100},
  bdsk-url-1    = {https://doi.org/10.3389/frai.2020.00006},
}
-
Charles Patrick Martin and Jim Torresen. 2020. Data Driven Analysis of Tiny Touchscreen Performance with MicroJam. Computer Music Journal 43, 4, 41–57. http://doi.org/10.1162/COMJ_a_00536
DOI URL arXiv Preprint PDF Video BibTex
@article{Martin:2019ab,
  author        = {Martin, Charles Patrick and Torresen, Jim},
  title         = {Data Driven Analysis of Tiny Touchscreen Performance with MicroJam},
  journal       = {Computer Music Journal},
  volume        = {43},
  number        = {4},
  pages         = {41--57},
  year          = {2020},
  doi           = {10.1162/COMJ_a_00536},
  archiveprefix = {arXiv},
  eprint        = {1902.00680},
  url           = {https://arxiv.org/abs/1902.00680},
  preprint      = {http://urn.nb.no/URN:NBN:no-84108},
  video         = {https://youtu.be/HhI47-XzrtI},
  keywords      = {journal-article, refereed},
  date-added    = {2019-02-04 11:20:55 +0100},
  date-modified = {2020-11-06 14:44:04 +1100},
  bdsk-url-1    = {https://arxiv.org/abs/1902.00680},
}
2019
-
Tønnes F. Nygaard, Jørgen Nordmoen, Charles P. Martin, and Kyrre Glette. 2019. Lessons Learned from Real-World Experiments with DyRET: the Dynamic Robot for Embodied Testing. ICRA Legged Locomotion Workshop.
URL arXiv BibTex
@inproceedings{Nygaard:2019ac,
  author        = {Nygaard, T{\o}nnes F. and Nordmoen, J{\o}rgen and Martin, Charles P. and Glette, Kyrre},
  title         = {Lessons Learned from Real-World Experiments with DyRET: the Dynamic Robot for Embodied Testing},
  booktitle     = {{ICRA} Legged Locomotion Workshop},
  month         = may,
  year          = {2019},
  archiveprefix = {arXiv},
  eprint        = {1905.05626},
  url           = {http://arxiv.org/abs/1905.05626},
  keywords      = {workshop-paper, conference-paper, non-refereed},
  date-added    = {2020-02-04 17:28:07 +1100},
  date-modified = {2020-02-04 23:16:36 +1100},
  bdsk-url-1    = {http://arxiv.org/abs/1905.05626},
}
-
Ben Swift, Charles P. Martin, and Alexander Hunter. 2019. Two Perspectives on Rebooting Computer Music and Music Education: Composition and Computer Science. Proceedings of the Australasian Computer Music Conference, Australasian Computer Music Association, pp. 53–57. http://doi.org/10.25911/5e37e8d92ff89
DOI Preprint PDF BibTex
@inproceedings{Swift:2019aa,
  author        = {Swift, Ben and Martin, Charles P. and Hunter, Alexander},
  title         = {Two Perspectives on Rebooting Computer Music and Music Education: Composition and Computer Science},
  booktitle     = {Proceedings of the {Australasian} Computer Music Conference},
  address       = {Fitzroy, Australia},
  pages         = {53--57},
  month         = jul,
  year          = {2019},
  publisher     = {Australasian Computer Music Association},
  doi           = {10.25911/5e37e8d92ff89},
  preprint      = {https://metatonetransfer.com/preprints/2019-Swift-TwoPerspectives.pdf},
  keywords      = {conference-paper, refereed},
  date-added    = {2020-01-19 22:31:47 +1100},
  date-modified = {2020-12-04 15:18:46 +1100},
  bdsk-url-1    = {https://doi.org/10.25911/5e37e8d92ff89},
}
-
Benedikte Wallace, Charles P. Martin, and Kristian Nymoen. 2019. Tracing from Sound to Movement with Mixture Density Recurrent Neural Networks. Proceedings of the 6th International Conference on Movement and Computing, Association for Computing Machinery. http://doi.org/10.1145/3347122.3371376
DOI Preprint PDF BibTex
@inproceedings{Wallace:2019ab,
  author        = {Wallace, Benedikte and Martin, Charles P. and Nymoen, Kristian},
  title         = {Tracing from Sound to Movement with Mixture Density Recurrent Neural Networks},
  booktitle     = {Proceedings of the 6th International Conference on Movement and Computing},
  series        = {MOCO '19},
  publisher     = {Association for Computing Machinery},
  address       = {New York, NY, USA},
  location      = {Tempe, AZ, USA},
  month         = oct,
  year          = {2019},
  articleno     = {31},
  numpages      = {4},
  doi           = {10.1145/3347122.3371376},
  isbn          = {9781450376549},
  preprint      = {https://metatonetransfer.com/preprints/2019-MOCO-Wallace-TracingSoundToMovement.pdf},
  keywords      = {conference-paper, refereed},
  date-added    = {2020-01-19 22:09:23 +1100},
  date-modified = {2020-12-04 15:27:01 +1100},
  bdsk-url-1    = {https://doi.org/10.1145/3347122.3371376},
}
-
Tønnes F. Nygaard, Jørgen Nordmoen, Kai Olav Ellefsen, Charles P. Martin, Jim Tørresen, and Kyrre Glette. 2019. Experiences from Real-World Evolution with DyRET: Dynamic Robot for Embodied Testing. Nordic Artificial Intelligence Research and Development, Springer International Publishing, pp. 58–68. http://doi.org/10.1007/978-3-030-35664-4_6
Abstract DOI BibTex
Creating robust robot platforms that function in the real world is a difficult task. Adding the requirement that the platform should be capable of learning, from nothing, ways to generate its own movement makes the task even harder. Evolutionary Robotics is a promising field that combines the creativity of evolutionary optimization with the real-world focus of robotics to bring about unexpected control mechanisms in addition to whole new robot designs. Constructing a platform that is capable of these feats is difficult, and it is important to share experiences and lessons learned so that designers of future robot platforms can benefit. In this paper, we introduce our robotics platform and detail our experiences with real-world evolution. We present thoughts on initial design considerations and key insights we have learned from extensive experimentation. We hope to inspire new platform development and hopefully reduce the threshold of doing real-world legged robot evolution.
@inproceedings{Nygaard:2019ab,
  author        = {Nygaard, T{\o}nnes F. and Nordmoen, J{\o}rgen and Ellefsen, Kai Olav and Martin, Charles P. and T{\o}rresen, Jim and Glette, Kyrre},
  title         = {Experiences from Real-World Evolution with DyRET: Dynamic Robot for Embodied Testing},
  booktitle     = {Nordic Artificial Intelligence Research and Development},
  editor        = {Bach, Kerstin and Ruocco, Massimiliano},
  series        = {Communications in Computer and Information Science},
  volume        = {1056},
  publisher     = {Springer International Publishing},
  address       = {Cham},
  month         = may,
  year          = {2019},
  pages         = {58--68},
  doi           = {10.1007/978-3-030-35664-4_6},
  keywords      = {refereed, conference-paper},
  date-added    = {2019-11-29 11:16:26 +1100},
  date-modified = {2020-01-19 22:25:26 +1100},
  bdsk-url-1    = {https://doi.org/10.1007/978-3-030-35664-4_6},
}
-
Aline Weber, Charles Patrick Martin, Jim Torresen, and Bruno C. da Silva. 2019. Identifying Reusable Early-Life Options. 2019 Joint IEEE 9th International Conference on Development and Learning and Epigenetic Robotics (ICDL-EpiRob), IEEE, pp. 335–340. http://doi.org/10.1109/DEVLRN.2019.8850725
DOI BibTex
@inproceedings{Weber:2019aa,
  author        = {Weber, Aline and Martin, Charles Patrick and Torresen, Jim and {da Silva}, Bruno C.},
  title         = {Identifying Reusable Early-Life Options},
  booktitle     = {2019 Joint IEEE 9th International Conference on Development and Learning and Epigenetic Robotics (ICDL-EpiRob)},
  publisher     = {IEEE},
  month         = aug,
  year          = {2019},
  pages         = {335--340},
  doi           = {10.1109/DEVLRN.2019.8850725},
  keywords      = {conference-paper, refereed},
  date-added    = {2019-10-03 10:50:10 +1000},
  date-modified = {2019-10-03 10:51:07 +1000},
  bdsk-url-1    = {https://doi.org/10.1109/DEVLRN.2019.8850725},
}
-
Torgrim Rudland Næss and Charles Patrick Martin. 2019. A Physical Intelligent Instrument using Recurrent Neural Networks. Proceedings of the International Conference on New Interfaces for Musical Expression, UFRGS, pp. 79–82.
URL Preprint PDF BibTex
@inproceedings{Naess:2019aa,
  author        = {N{\ae}ss, Torgrim Rudland and Martin, Charles Patrick},
  title         = {A Physical Intelligent Instrument using Recurrent Neural Networks},
  booktitle     = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  editor        = {Queiroz, Marcelo and Sed{\'o}, Anna Xamb{\'o}},
  series        = {NIME '19},
  publisher     = {UFRGS},
  address       = {Porto Alegre, Brazil},
  month         = jun,
  year          = {2019},
  pages         = {79--82},
  url           = {http://www.nime.org/proceedings/2019/nime2019_016.pdf},
  preprint      = {https://metatonetransfer.com/preprints/2019-NIME-PhysicalIntelligentInstrument.pdf},
  keywords      = {refereed, conference-paper, artificial-intelligence, music},
  date-added    = {2019-06-10 13:02:03 +1000},
  date-modified = {2019-07-16 10:17:40 +1000},
  bdsk-url-1    = {http://www.nime.org/proceedings/2019/nime2019_016.pdf},
}
-
Andrei Faitas, Synne Engdahl Baumann, Torgrim Rudland Næss, Jim Torresen, and Charles Patrick Martin. 2019. Generating Convincing Harmony Parts with Bidirectional Long Short-Term Memory Networks. Proceedings of the International Conference on New Interfaces for Musical Expression, UFRGS, pp. 325–330.
URL Preprint PDF BibTex
@inproceedings{Faitas:2019aa,
  author        = {Faitas, Andrei and Baumann, Synne Engdahl and N{\ae}ss, Torgrim Rudland and Torresen, Jim and Martin, Charles Patrick},
  title         = {Generating Convincing Harmony Parts with Bidirectional Long Short-Term Memory Networks},
  booktitle     = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  editor        = {Queiroz, Marcelo and Sed{\'o}, Anna Xamb{\'o}},
  series        = {NIME '19},
  publisher     = {UFRGS},
  address       = {Porto Alegre, Brazil},
  month         = jun,
  year          = {2019},
  pages         = {325--330},
  url           = {http://www.nime.org/proceedings/2019/nime2019_062.pdf},
  preprint      = {https://metatonetransfer.com/preprints/2019-NIME-GeneratingHarmonyBLSTM.pdf},
  keywords      = {refereed, conference-paper, artificial-intelligence, music},
  date-added    = {2019-06-10 12:59:32 +1000},
  date-modified = {2019-07-16 10:20:58 +1000},
  bdsk-url-1    = {http://www.nime.org/proceedings/2019/nime2019_062.pdf},
}
-
Charles Patrick Martin and Jim Torresen. 2019. An Interactive Musical Prediction System with Mixture Density Recurrent Neural Networks. Proceedings of the International Conference on New Interfaces for Musical Expression, UFRGS, pp. 260–265.
URL arXiv Preprint PDF Video BibTex
@inproceedings{Martin:2019aa,
  author        = {Martin, Charles Patrick and Torresen, Jim},
  title         = {An Interactive Musical Prediction System with Mixture Density Recurrent Neural Networks},
  booktitle     = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  editor        = {Queiroz, Marcelo and Sed{\'o}, Anna Xamb{\'o}},
  series        = {NIME '19},
  publisher     = {UFRGS},
  address       = {Porto Alegre, Brazil},
  month         = jun,
  year          = {2019},
  pages         = {260--265},
  archiveprefix = {arXiv},
  eprint        = {1904.05009},
  url           = {http://www.nime.org/proceedings/2019/nime2019_050.pdf},
  preprint      = {http://urn.nb.no/URN:NBN:no-77374},
  video         = {https://youtu.be/Kdmhrp2dfHw},
  keywords      = {refereed, conference-paper, artificial-intelligence, music},
  date-added    = {2019-01-30 21:23:28 +0100},
  date-modified = {2019-07-16 10:19:00 +1000},
  bdsk-url-1    = {http://www.nime.org/proceedings/2019/nime2019_050.pdf},
}
-
Tønnes F. Nygaard, Charles P. Martin, Jim Torresen, and Kyrre Glette. 2019. Evolving Robots on Easy Mode: Towards a Variable Complexity Controller for Quadrupeds. International Conference on the Applications of Evolutionary Computation, Springer International Publishing, pp. 616–632. http://doi.org/10.1007/978-3-030-16692-2_41
DOI Preprint PDF BibTex
@inproceedings{Nygaard:2019aa,
  author        = {Nygaard, T{\o}nnes F. and Martin, Charles P. and Torresen, Jim and Glette, Kyrre},
  title         = {Evolving Robots on Easy Mode: Towards a Variable Complexity Controller for Quadrupeds},
  booktitle     = {International Conference on the Applications of Evolutionary Computation},
  editor        = {Kaufmann, Paul and Castillo, Pedro A.},
  publisher     = {Springer International Publishing},
  address       = {Cham},
  month         = apr,
  year          = {2019},
  pages         = {616--632},
  doi           = {10.1007/978-3-030-16692-2_41},
  preprint      = {http://urn.nb.no/URN:NBN:no-77881},
  keywords      = {conference-paper, refereed, artificial-intelligence},
  date-added    = {2019-01-30 16:21:02 +0100},
  date-modified = {2019-08-14 09:22:37 +1000},
  bdsk-url-1    = {https://doi.org/10.1007/978-3-030-16692-2_41},
}
-
Benedikte Wallace and Charles P. Martin. 2019. Comparing Models for Harmony Prediction in an Interactive Audio Looper. International Conference on Computational Intelligence in Music, Sound, Art and Design, Springer International Publishing, pp. 173–187. http://doi.org/10.1007/978-3-030-16667-0_12
DOI Preprint PDF BibTex
@inproceedings{Wallace:2019aa,
  author        = {Wallace, Benedikte and Martin, Charles P.},
  title         = {Comparing Models for Harmony Prediction in an Interactive Audio Looper},
  booktitle     = {International Conference on Computational Intelligence in Music, Sound, Art and Design},
  editor        = {Ek{\'a}rt, Anik{\'o} and Liapis, Antonios and Castro Pena, Mar{\'\i}a Luz},
  publisher     = {Springer International Publishing},
  address       = {Cham},
  month         = apr,
  year          = {2019},
  pages         = {173--187},
  doi           = {10.1007/978-3-030-16667-0_12},
  preprint      = {https://metatonetransfer.com/preprints/2019-ComparingModelsForHarmonyPrediction.pdf},
  keywords      = {conference-paper, refereed, artificial-intelligence},
  date-added    = {2019-01-30 15:57:22 +0100},
  date-modified = {2019-08-14 09:22:49 +1000},
  bdsk-url-1    = {https://doi.org/10.1007/978-3-030-16667-0_12},
}
-
Tønnes F. Nygaard, Charles P. Martin, Jim Torresen, and Kyrre Glette. 2019. Self-Modifying Morphology Experiments with DyRET: Dynamic Robot for Embodied Testing. Proc. of the IEEE Int. Conf. on Robotics & Automation (ICRA), IEEE, pp. 9446–9452. http://doi.org/10.1109/ICRA.2019.8793663
DOI URL arXiv Preprint PDF Video BibTex
@inproceedings{Nygaard:2018ab,
  author        = {Nygaard, T{\o}nnes F. and Martin, Charles P. and Torresen, Jim and Glette, Kyrre},
  title         = {Self-Modifying Morphology Experiments with {DyRET}: Dynamic Robot for Embodied Testing},
  booktitle     = {Proc. of the IEEE Int. Conf. on Robotics \& Automation (ICRA)},
  publisher     = {IEEE},
  month         = may,
  year          = {2019},
  pages         = {9446--9452},
  doi           = {10.1109/ICRA.2019.8793663},
  archiveprefix = {arXiv},
  eprint        = {1803.05629},
  url           = {https://arxiv.org/abs/1803.05629},
  preprint      = {http://urn.nb.no/URN:NBN:no-73716},
  video         = {https://youtu.be/DLVc0JOWaUM},
  keywords      = {refereed, conference-paper, artificial-intelligence},
  date-added    = {2018-02-28 21:29:41 +0000},
  date-modified = {2019-08-13 16:06:57 +1000},
  bdsk-url-1    = {https://arxiv.org/abs/1803.05629},
  bdsk-url-2    = {https://doi.org/10.1109/ICRA.2019.8793663},
}
-
Christina Hopgood, Charles P. Martin, and Gisli Johann Gretarsson. 2019. Synesthetic: Composing works for Marimba and Automated Lighting. Proceedings of the Australasian Computer Music Conference, Australasian Computer Music Association, pp. 23–27.
URL Preprint PDF BibTex
@inproceedings{Hopgood:2018aa,
  author        = {Hopgood, Christina and Martin, Charles P. and Gretarsson, Gisli Johann},
  title         = {Synesthetic: Composing works for Marimba and Automated Lighting},
  booktitle     = {Proceedings of the {Australasian} Computer Music Conference},
  publisher     = {Australasian Computer Music Association},
  address       = {Fitzroy, Australia},
  month         = jul,
  year          = {2019},
  pages         = {23--27},
  url           = {http://hdl.handle.net/1885/201503},
  preprint      = {http://hdl.handle.net/1885/201503},
  keywords      = {conference-paper, refereed, artistic-research},
  date-added    = {2018-02-18 19:35:00 +0000},
  date-modified = {2020-02-04 17:12:44 +1100},
  bdsk-url-1    = {https://metatonetransfer.com/preprints/2018-Synesthetic.pdf},
}
-
Charles Patrick Martin and Henry Gardner. 2019. Free-Improvised Rehearsal-as-Research for Musical HCI. In New Directions in Music and Human-Computer Interaction, Simon Holland, Tom Mudd, Katie Wilkie-McKenna, Andrew McPherson and Marcelo M. Wanderley (eds.). Springer, Cham, 269–284. http://doi.org/10.1007/978-3-319-92069-6_17
DOI URL Preprint PDF BibTex
@incollection{Martin:2018aa,
  author        = {Martin, Charles Patrick and Gardner, Henry},
  title         = {Free-Improvised Rehearsal-as-Research for Musical {HCI}},
  booktitle     = {New Directions in Music and Human-Computer Interaction},
  editor        = {Holland, Simon and Mudd, Tom and Wilkie-McKenna, Katie and McPherson, Andrew and Wanderley, Marcelo M.},
  series        = {Springer Series on Cultural Computing},
  publisher     = {Springer},
  address       = {Cham},
  month         = feb,
  year          = {2019},
  pages         = {269--284},
  doi           = {10.1007/978-3-319-92069-6_17},
  url           = {http://urn.nb.no/URN:NBN:no-71949},
  preprint      = {http://urn.nb.no/URN:NBN:no-71949},
  keywords      = {book-chapter, refereed},
  date-modified = {2024-04-12 11:37:35 +1000},
}
2018
-
Tønnes F. Nygaard, Charles P. Martin, Jim Tørresen, and Kyrre Glette. 2018. Exploring Mechanically Self-Reconfiguring Robots for Autonomous Design. ICRA Workshop on Autonomous Robot Design.
URL arXiv BibTex
@inproceedings{Nygaard:2018ac,
  author        = {Nygaard, T{\o}nnes F. and Martin, Charles P. and T{\o}rresen, Jim and Glette, Kyrre},
  title         = {Exploring Mechanically Self-Reconfiguring Robots for Autonomous Design},
  booktitle     = {{ICRA} Workshop on Autonomous Robot Design},
  month         = apr,
  year          = {2018},
  archiveprefix = {arXiv},
  eprint        = {1805.02965},
  url           = {http://arxiv.org/abs/1805.02965},
  keywords      = {workshop-paper, conference-paper, non-refereed},
  date-added    = {2020-02-04 17:29:05 +1100},
  date-modified = {2020-02-04 23:16:43 +1100},
  bdsk-url-1    = {http://arxiv.org/abs/1805.02965},
}
-
Charles Patrick Martin. 2018. Myo-to-OSC: A pure-python cross-platform solution for simply connecting Myo armbands to OSC-connected software. http://doi.org/10.5281/zenodo.1216169
DOI BibTex
@misc{Martin:2018aj,
  author        = {Martin, Charles Patrick},
  title         = {Myo-to-OSC: A pure-python cross-platform solution for simply connecting Myo armbands to OSC-connected software},
  howpublished  = {Git Repository},
  month         = apr,
  year          = {2018},
  doi           = {10.5281/zenodo.1216169},
  keywords      = {open-source, open-source-code-project, non-refereed, github},
  date-added    = {2019-02-07 11:14:21 +0100},
  date-modified = {2019-02-07 11:15:20 +0100},
  bdsk-url-1    = {https://doi.org/10.5281/zenodo.1216169},
}
-
Charles Patrick Martin. 2018. Creative-Prediction: Tutorials and Walkthroughs for Predicting Creative Data with Neural Networks. http://doi.org/10.5281/zenodo.1494039
DOI URL BibTex
@misc{Martin:2018ad,
  author        = {Martin, Charles Patrick},
  title         = {Creative-Prediction: Tutorials and Walkthroughs for Predicting Creative Data with Neural Networks},
  howpublished  = {Git Repository and Website},
  month         = nov,
  year          = {2018},
  doi           = {10.5281/zenodo.1494039},
  url           = {https://creativeprediction.xyz},
  keywords      = {open-source-code-project, open-source, artificial-intelligence, github, non-refereed},
  date-added    = {2019-02-07 11:10:17 +0100},
  date-modified = {2019-02-07 11:13:12 +0100},
  bdsk-url-1    = {https://creativeprediction.xyz},
  bdsk-url-2    = {https://doi.org/10.5281/zenodo.1494039},
}
-
Charles P. Martin and Jim Torresen. 2018. Predictive Musical Interaction with MDRNNs. NeurIPS 2018 Workshop on Machine Learning for Creativity and Design. http://doi.org/10.5281/zenodo.2558826
DOI URL Preprint PDF BibTex
@inproceedings{Martin:2018ai,
  author        = {Martin, Charles P. and Torresen, Jim},
  title         = {Predictive Musical Interaction with MDRNNs},
  booktitle     = {NeurIPS 2018 Workshop on Machine Learning for Creativity and Design},
  address       = {Montr{\'e}al, Canada},
  month         = dec,
  year          = {2018},
  doi           = {10.5281/zenodo.2558826},
  url           = {https://nips2018creativity.github.io/doc/Predictive_Musical_Interaction_with_MDRNNs.pdf},
  preprint      = {https://metatonetransfer.com/preprints/2018-predictive-musical-interaction-with-MDRNNs.pdf},
  keywords      = {conference-paper, workshop-paper, refereed},
  date-added    = {2018-12-04 15:39:37 +0100},
  date-modified = {2019-02-07 10:15:47 +0100},
  bdsk-url-1    = {https://nips2018creativity.github.io/doc/Predictive_Musical_Interaction_with_MDRNNs.pdf},
  bdsk-url-2    = {https://doi.org/10.5281/zenodo.2558826},
}
-
Charles Patrick Martin, Kyrre Glette, and Jim Tørresen. 2018. Tutorial on Creative Prediction with Neural Networks. The 2018 Conference on Artificial Life.
URL BibTex
@inproceedings{Martin:2018ah,
  author        = {Martin, Charles Patrick and Glette, Kyrre and T{\o}rresen, Jim},
  title         = {Tutorial on Creative Prediction with Neural Networks},
  booktitle     = {The 2018 Conference on Artificial Life},
  series        = {ALife '18},
  address       = {Tokyo, Japan},
  month         = jul,
  year          = {2018},
  url           = {https://cpmpercussion.github.io/creative-prediction/},
  keywords      = {conference-presentation, non-refereed, tutorial},
  date-added    = {2018-09-10 23:22:44 +0200},
  date-modified = {2019-01-08 16:44:30 +0100},
  bdsk-url-1    = {https://cpmpercussion.github.io/creative-prediction/},
}
-
Enrique Alejandro Garcia Ceja, Kai Olav Ellefsen, Charles Patrick Martin, and Jim Tørresen. 2018. Tutorial on Prediction, Interaction, and User Behaviour. IEEE World Congress on Computational Intelligence.
URL BibTex
@inproceedings{Garcia-Ceja:2018aa,
  author        = {Garcia Ceja, Enrique Alejandro and Ellefsen, Kai Olav and Martin, Charles Patrick and T{\o}rresen, Jim},
  title         = {Tutorial on Prediction, Interaction, and User Behaviour},
  booktitle     = {IEEE World Congress on Computational Intelligence},
  series        = {WCCI '18},
  address       = {Rio de Janeiro, Brazil},
  month         = jul,
  year          = {2018},
  url           = {https://kaiolae.github.io/wcci2018_prediction_tutorial/},
  keywords      = {conference-presentation, non-refereed, tutorial},
  date-added    = {2018-09-10 23:21:06 +0200},
  date-modified = {2019-01-08 16:44:24 +0100},
  bdsk-url-1    = {https://kaiolae.github.io/wcci2018_prediction_tutorial/},
}
-
Charles P. Martin, Kyrre Glette, Tønnes F. Nygaard, and Jim Torresen. 2018. Self-Awareness in a Cyber-Physical Predictive Musical Interface. Self-Awareness in Cyber-Physical Systems Workshop, Aston University.
URL BibTex
@inproceedings{Martin:2018af,
  author        = {Martin, Charles P. and Glette, Kyrre and Nygaard, T{\o}nnes F. and Torresen, Jim},
  title         = {Self-Awareness in a Cyber-Physical Predictive Musical Interface},
  booktitle     = {Self-Awareness in Cyber-Physical Systems Workshop},
  series        = {SelPhyS 2018},
  publisher     = {Aston University},
  address       = {Birmingham, UK},
  month         = apr,
  year          = {2018},
  url           = {https://metatonetransfer.com/preprints/2018-self-aware-music-interface-preprint.pdf},
  keywords      = {conference-presentation, non-refereed, poster, extended-abstract},
  date-added    = {2018-09-10 23:06:33 +0200},
  date-modified = {2019-01-08 16:42:00 +0100},
  bdsk-url-1    = {https://metatonetransfer.com/preprints/2018-self-aware-music-interface-preprint.pdf},
}
-
Charles Patrick Martin and Alexander Refsum Jensenius. 2018. Stillness Under Tension: Performance for Myo armbands and Bela embedded computers. Musical Program of the International Conference on New Interfaces for Musical Expression.
BibTex
@inproceedings{Martin:2018ae,
  author        = {Martin, Charles Patrick and Jensenius, Alexander Refsum},
  title         = {Stillness Under Tension: Performance for {Myo} armbands and {Bela} embedded computers},
  booktitle     = {Musical Program of the International Conference on New Interfaces for Musical Expression},
  series        = {NIME '18},
  month         = jun,
  year          = {2018},
  keywords      = {artistic-performance, refereed},
  date-added    = {2018-09-10 23:04:14 +0200},
  date-modified = {2018-09-10 23:20:36 +0200},
}
-
Charles P. Martin, Alexander Refsum Jensenius, and Jim Torresen. 2018. Composing an Ensemble Standstill Work for Myo and Bela. Proceedings of the International Conference on New Interfaces for Musical Expression, pp. 196–197. http://doi.org/10.5281/zenodo.1302543
DOI URL arXiv Preprint PDF BibTex
@inproceedings{Martin:2018ab,
  author        = {Martin, Charles P. and Jensenius, Alexander Refsum and Torresen, Jim},
  title         = {Composing an Ensemble Standstill Work for Myo and Bela},
  booktitle     = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  series        = {NIME '18},
  month         = jun,
  year          = {2018},
  pages         = {196--197},
  doi           = {10.5281/zenodo.1302543},
  archiveprefix = {arXiv},
  eprint        = {2012.02404},
  url           = {http://urn.nb.no/URN:NBN:no-68187},
  preprint      = {http://urn.nb.no/URN:NBN:no-68187},
  keywords      = {conference-paper, refereed},
  date-added    = {2018-02-18 19:56:30 +0000},
  date-modified = {2020-12-09 15:29:34 +1100},
}
-
Victor Evaristo Gonzalez Sanchez, Agata Zelechowska, Charles P. Martin, Victoria Johnson, Kari Anne Vadstensvik Bjerkestrand, and Alexander Refsum Jensenius. 2018. Bela-Based Augmented Acoustic Guitars for Inverse Sonic Microinteraction. Proceedings of the International Conference on New Interfaces for Musical Expression. http://doi.org/10.5281/zenodo.1302599
DOI URL Preprint PDF BibTex
@inproceedings{Sanchez:2018aa,
  author        = {Gonzalez Sanchez, Victor Evaristo and Zelechowska, Agata and Martin, Charles P. and Johnson, Victoria and Bjerkestrand, Kari Anne Vadstensvik and Jensenius, Alexander Refsum},
  title         = {Bela-Based Augmented Acoustic Guitars for Inverse Sonic Microinteraction},
  booktitle     = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  series        = {NIME '18},
  month         = jun,
  year          = {2018},
  doi           = {10.5281/zenodo.1302599},
  url           = {http://urn.nb.no/URN:NBN:no-68219},
  preprint      = {http://urn.nb.no/URN:NBN:no-68219},
  keywords      = {conference-paper, refereed},
  date-added    = {2018-02-18 19:54:36 +0000},
  date-modified = {2019-02-07 09:57:17 +0100},
}
-
Tønnes F. Nygaard, Charles P. Martin, Eivind Samuelsen, Jim Torresen, and Kyrre Glette. 2018. Real-World Evolution Adapts Robot Morphology and Control to Hardware Limitations. Proceedings of the Genetic and Evolutionary Computation Conference, ACM, pp. 125–132. http://doi.org/10.1145/3205455.3205567
DOI URL Preprint PDF Video BibTex
@inproceedings{Nygaard:2018aa,
  author        = {Nygaard, T{\o}nnes F. and Martin, Charles P. and Samuelsen, Eivind and Torresen, Jim and Glette, Kyrre},
  title         = {Real-World Evolution Adapts Robot Morphology and Control to Hardware Limitations},
  booktitle     = {Proceedings of the Genetic and Evolutionary Computation Conference},
  series        = {GECCO '18},
  publisher     = {{ACM}},
  address       = {New York, NY, USA},
  month         = jul,
  year          = {2018},
  pages         = {125--132},
  doi           = {10.1145/3205455.3205567},
  url           = {http://urn.nb.no/URN:NBN:no-68422},
  preprint      = {http://urn.nb.no/URN:NBN:no-68422},
  video         = {https://youtu.be/7r3cV6RuFJU},
  keywords      = {conference-paper, refereed},
  date-added    = {2018-02-18 19:51:47 +0000},
  date-modified = {2018-09-10 23:02:18 +0200},
}
-
Charles P. Martin and Jim Torresen. 2018. RoboJam: A Musical Mixture Density Network for Collaborative Touchscreen Interaction. Computational Intelligence in Music, Sound, Art and Design: International Conference, EvoMUSART, Springer International Publishing, pp. 161–176. http://doi.org/10.1007/978-3-319-77583-8_11
DOI URL arXiv Preprint PDF Video BibTex
@inproceedings{Martin:2018ag,
  author        = {Martin, Charles P. and Torresen, Jim},
  title         = {{RoboJam}: A Musical Mixture Density Network for Collaborative Touchscreen Interaction},
  booktitle     = {Computational Intelligence in Music, Sound, Art and Design: International Conference, {EvoMUSART}},
  editor        = {Liapis, Antonios and Romero Cardalda, Juan Jes{\'u}s and Ek{\'a}rt, Anik{\'o}},
  series        = {Lecture Notes in Computer Science},
  volume        = {10783},
  publisher     = {Springer International Publishing},
  address       = {Switzerland},
  month         = apr,
  year          = {2018},
  pages         = {161--176},
  doi           = {10.1007/978-3-319-77583-8_11},
  archiveprefix = {arXiv},
  eprint        = {1711.10746},
  url           = {http://arxiv.org/abs/1711.10746},
  preprint      = {http://urn.nb.no/URN:NBN:no-67979},
  video         = {https://youtu.be/rQbg1AAnWb8},
  keywords      = {conference-paper, refereed},
  date-modified = {2019-02-07 09:59:30 +0100},
  bdsk-url-1    = {http://arxiv.org/abs/1711.10746},
}
2017
-
Charles P. Martin. 2017. Musical Networks and Creative AI. Technology and Emotions.
BibTex
@inproceedings{Martin:2017ao,
  author        = {Martin, Charles P.},
  title         = {Musical Networks and Creative {AI}},
  booktitle     = {Technology and Emotions},
  address       = {Oslo, Norway},
  month         = nov,
  year          = {2017},
  keywords      = {conference-presentation, non-refereed},
  date-added    = {2018-02-18 21:45:53 +0000},
  date-modified = {2019-01-08 16:43:12 +0100},
}
-
Charles Patrick Martin, Victor Evaristo Gonzalez Sanchez, Tejaswinee Kelkar, et al. 2017. Ensemble Metatone: 3-hour improvised touchscreen performance. Elvelangs i Fakkellys (River walk by lantern light).
URL Video BibTex
@inproceedings{Martin:2017an,
  author        = {Martin, Charles Patrick and Gonzalez Sanchez, Victor Evaristo and Kelkar, Tejaswinee and Zelechowska, Agata and Berggren, Stig Johan and Hopgood, Christina and Wallace, Benedikte and Brustad, Henrik and Utne-Reitan, Bj{\o}rnar and Ellefsen, Kai Olav and Nygaard, T{\o}nnes Frostad and S{\o}yseth, Vegard D{\o}nnem and C{\^a}mara, Guilherme Schmidt and Diaz, Ximena Alarc{\'o}n},
  title         = {Ensemble Metatone: 3-hour improvised touchscreen performance},
  booktitle     = {Elvelangs i Fakkellys (River walk by lantern light)},
  month         = sep,
  year          = {2017},
  url           = {https://youtu.be/NepiJe-TB_Q},
  video         = {https://youtu.be/NepiJe-TB_Q},
  keywords      = {artistic-performance, non-refereed},
  date-added    = {2018-02-18 21:42:11 +0000},
  date-modified = {2019-01-08 16:43:38 +0100},
  bdsk-url-1    = {https://youtu.be/NepiJe-TB_Q},
}
-
Charles P. Martin. 2017. MicroJam: A Social App for Making Music. Boost - Technology and Equality in Music, JM Norway.
BibTex
@inproceedings{Martin:2017am,
  author        = {Martin, Charles P.},
  title         = {MicroJam: A Social App for Making Music},
  booktitle     = {Boost - Technology and Equality in Music},
  publisher     = {JM Norway},
  address       = {Sentralen, Oslo, Norway},
  month         = jun,
  year          = {2017},
  keywords      = {conference-presentation, non-refereed, invited},
  date-added    = {2018-02-18 21:41:13 +0000},
  date-modified = {2018-02-21 16:28:33 +0000},
}
-
Charles Patrick Martin. 2017. Making Social Music with MicroJam. Cutting Edge Festival: Future Planet, Future Society, Future You.
BibTex
@inproceedings{Martin:2017al,
  author        = {Martin, Charles Patrick},
  title         = {Making Social Music with {MicroJam}},
  booktitle     = {Cutting Edge Festival: Future Planet, Future Society, Future You},
  address       = {Oslo, Norway},
  month         = sep,
  year          = {2017},
  keywords      = {conference-presentation, non-refereed, invited},
  date-added    = {2018-02-18 21:39:29 +0000},
  date-modified = {2019-01-08 16:43:21 +0100},
}
-
Charles P. Martin. 2017. Musical Networks: Using Recurrent Neural Networks to Model and Complement Musical Creativity. Musikkteknologidagene, Norwegian Academy of Music. http://doi.org/10.13140/RG.2.2.27125.06887
DOI BibTex
@inproceedings{Martin:2017ak,
  author        = {Martin, Charles P.},
  title         = {Musical Networks: Using Recurrent Neural Networks to Model and Complement Musical Creativity},
  booktitle     = {Musikkteknologidagene},
  publisher     = {Norwegian Academy of Music},
  address       = {Oslo, Norway},
  month         = oct,
  year          = {2017},
  doi           = {10.13140/RG.2.2.27125.06887},
  keywords      = {conference-presentation, non-refereed},
  date-added    = {2018-02-18 21:38:07 +0000},
  date-modified = {2019-01-08 16:42:37 +0100},
  bdsk-url-1    = {http://dx.doi.org/10.13140/RG.2.2.27125.06887},
}
-
Alexander Refsum Jensenius, Kari Anne Vadstensvik Bjerkestrand, Victoria Johnson, Victor Evaristo Gonzalez Sanchez, Agata Zelechowska, and Charles Patrick Martin. 2017. Sverm-Resonans: Performance Installation for Acoustically-Activated Guitars. Program of the Ultima Contemporary Music Festival, Oslo, Ultima.
URL Video BibTex
@inproceedings{Jensenius:2017aa,
  author        = {Jensenius, Alexander Refsum and Bjerkestrand, Kari Anne Vadstensvik and Johnson, Victoria and Gonzalez Sanchez, Victor Evaristo and Zelechowska, Agata and Martin, Charles Patrick},
  title         = {Sverm-Resonans: Performance Installation for Acoustically-Activated Guitars},
  booktitle     = {{Program of the Ultima Contemporary Music Festival, Oslo}},
  publisher     = {Ultima},
  address       = {Sentralen, Oslo, Norway},
  month         = sep,
  year          = {2017},
  url           = {http://www.uio.no/english/research/groups/fourms/projects/sverm/events/2017/ultima/index.html},
  video         = {https://youtu.be/fhTuI3AhZL0},
  keywords      = {artistic-performance, non-refereed},
  date-added    = {2018-02-18 21:33:41 +0000},
  date-modified = {2019-01-08 16:42:52 +0100},
  bdsk-url-1    = {https://youtu.be/fhTuI3AhZL0},
}
-
Charles P. Martin and Jim Torresen. 2017. MicroJam: An App for Sharing Tiny Touch-Screen Performances. Proceedings of the International Conference on New Interfaces for Musical Expression, Aalborg University Copenhagen, pp. 495–496. http://doi.org/10.5281/zenodo.1176334
Abstract DOI URL Preprint PDF Video BibTex
MicroJam is a mobile app for sharing tiny touch-screen performances. Mobile applications that streamline creativity and social interaction have enabled a very broad audience to develop their own creative practices. While these apps have been very successful in visual arts (particularly photography), the idea of social music-making has not had such a broad impact. MicroJam includes several novel performance concepts intended to engage the casual music maker and inspired by current trends in social creativity support tools. Touch-screen performances are limited to five seconds, instrument settings are posed as sonic “filters”, and past performances are arranged as a timeline with replies and layers. These features of MicroJam encourage users not only to perform music more frequently, but to engage with others in impromptu ensemble music making.
@inproceedings{Martin:2017ab,
  author         = {Martin, Charles P. and Torresen, Jim},
  title          = {MicroJam: An App for Sharing Tiny Touch-Screen Performances},
  booktitle      = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  series         = {NIME '17},
  publisher      = {Aalborg University Copenhagen},
  address        = {Denmark},
  month          = may,
  year           = {2017},
  pages          = {495--496},
  doi            = {10.5281/zenodo.1176334},
  url            = {http://urn.nb.no/URN:NBN:no-58823},
  preprint       = {http://urn.nb.no/URN:NBN:no-58823},
  video          = {https://youtu.be/SkUjjQd13KU},
  conference-url = {http://www.nime.org/proceedings/2017/nime2017_paper0096.pdf},
  keywords       = {conference-paper, refereed},
  date-modified  = {2018-04-25 14:22:08 +0000},
  bdsk-url-1     = {http://urn.nb.no/URN:NBN:no-58823},
}
-
Charles P. Martin and Jim Torresen. 2017. Exploring Social Mobile Music with Tiny Touch-Screen Performances. Proceedings of the 14th Sound and Music Computing Conference, Aalto University, pp. 175–180. http://doi.org/10.5281/zenodo.1401907
DOI URL Preprint PDF Video BibTex
@inproceedings{Martin:2017ac,
  author        = {Martin, Charles P. and Torresen, Jim},
  title         = {Exploring Social Mobile Music with Tiny Touch-Screen Performances},
  booktitle     = {Proceedings of the 14th Sound and Music Computing Conference},
  editor        = {Lokki, Tapio and P{\"a}tynen, Jukka and V{\"a}lim{\"a}ki, Vesa},
  series        = {SMC '17},
  publisher     = {Aalto University},
  address       = {Espoo, Finland},
  month         = jul,
  year          = {2017},
  pages         = {175--180},
  doi           = {10.5281/zenodo.1401907},
  url           = {http://urn.nb.no/URN:NBN:no-60558},
  preprint      = {http://urn.nb.no/URN:NBN:no-60558},
  video         = {https://youtu.be/SkUjjQd13KU},
  keywords      = {conference-paper, refereed},
  date-modified = {2019-02-07 10:06:37 +0100},
  bdsk-url-1    = {http://smc2017.aalto.fi/media/materials/proceedings/SMC17_p175.pdf},
}
-
Charles P. Martin, Kai Olav Ellefsen, and Jim Torresen. 2017. Deep Models for Ensemble Touch-Screen Improvisation. Proceedings of the 12th International Audio Mostly Conference on Augmented and Participatory Sound and Music Experiences. http://doi.org/10.1145/3123514.3123556
Abstract DOI Preprint PDF BibTex
For many, the pursuit and enjoyment of musical performance goes hand-in-hand with collaborative creativity, whether in a choir, jazz combo, orchestra, or rock band. However, few musical interfaces use the affordances of computers to create or enhance ensemble musical experiences. One possibility for such a system would be to use an artificial neural network (ANN) to model the way other musicians respond to a single performer. Some forms of music have well-understood rules for interaction; however, this is not the case for free improvisation with new touch-screen instruments where styles of interaction may be discovered in each new performance. This paper describes an ANN model of ensemble interactions trained on a corpus of such ensemble touch-screen improvisations. The results show realistic ensemble interactions and the model has been used to implement a live performance system where a performer is accompanied by the predicted and sonified touch gestures of three virtual players.
@inproceedings{Martin:2017ae,
  author = {Martin, Charles P. and Ellefsen, Kai Olav and Torresen, Jim},
  title = {Deep Models for Ensemble Touch-Screen Improvisation},
  booktitle = {Proceedings of the 12th International Audio Mostly Conference on Augmented and Participatory Sound and Music Experiences},
  series = {AM '17},
  month = aug,
  year = {2017},
  doi = {10.1145/3123514.3123556},
  preprint = {http://urn.nb.no/URN:NBN:no-61228},
  keywords = {conference-paper, refereed, artificial-intelligence},
  date-modified = {2018-02-18 20:23:05 +0000},
  bdsk-url-1 = {https://dx.doi.org/10.1145/3123514.3123556}
}
-
Charles Patrick Martin. 2017. Percussionist-Centred Design for Touchscreen Digital Musical Instruments. Contemporary Music Review 36, 1–2, 64–85. http://doi.org/10.1080/07494467.2017.1370794
DOI URL Preprint PDF BibTex
@article{Martin:2017af,
  author = {Martin, Charles Patrick},
  title = {Percussionist-Centred Design for Touchscreen Digital Musical Instruments},
  journal = {Contemporary Music Review},
  volume = {36},
  number = {1--2},
  pages = {64--85},
  month = sep,
  year = {2017},
  doi = {10.1080/07494467.2017.1370794},
  url = {http://urn.nb.no/URN:NBN:no-61222},
  preprint = {http://urn.nb.no/URN:NBN:no-61222},
  status = {published},
  keywords = {journal-article, refereed},
  date-modified = {2019-09-06 15:25:59 +1000},
  bdsk-url-1 = {https://dx.doi.org/10.1080/07494467.2017.1370794}
}
2016
-
Charles P. Martin. 2016. PhaseRings for iPad Ensemble and Ensemble Director Agent. Musical Program of the International Conference on Auditory Display, pp. 232–233.
URL Preprint PDF Video BibTex
@inproceedings{Martin:2016ac,
  author = {Martin, Charles P.},
  title = {PhaseRings for iPad Ensemble and Ensemble Director Agent},
  booktitle = {Musical Program of the International Conference on Auditory Display},
  type = {Musical Performance},
  month = jul,
  year = {2016},
  pages = {232--233},
  url = {http://www.icad.org/icad2016/proceedings/concert/ICAD2016_paper_99.pdf},
  preprint = {https://metatonetransfer.com/preprints/2016-ICAD-PhaseRings.pdf},
  video = {https://youtu.be/aDEQMLwd8ok},
  keywords = {artistic-performance, non-refereed},
  date-added = {2018-02-18 20:35:21 +0000},
  date-modified = {2018-02-21 21:02:07 +0000},
  bdsk-url-1 = {https://metatonetransfer.com/preprints/2016-ICAD-PhaseRings.pdf}
}
-
Charles Martin and Henry Gardner. 2016. Can Machine Learning Apply to Musical Ensembles? Proceedings of the CHI Human-Centered Machine Learning Workshop. http://doi.org/10.5281/zenodo.56379
DOI URL BibTex
@inproceedings{Martin:2016aa,
  author = {Martin, Charles and Gardner, Henry},
  title = {Can Machine Learning Apply to Musical Ensembles?},
  booktitle = {Proceedings of the {CHI} Human-Centered Machine Learning Workshop},
  conference-url = {http://hcml2016.goldsmithsdigital.com},
  month = may,
  year = {2016},
  doi = {10.5281/zenodo.56379},
  url = {https://metatonetransfer.com/preprints/2016-CHI-HCML-MachineLearningEnsembles.pdf},
  keywords = {conference-paper, workshop-paper, refereed},
  date-modified = {2018-02-21 21:16:27 +0000},
  bdsk-url-1 = {https://dx.doi.org/10.5281/zenodo.56379}
}
-
Charles Martin and Henry Gardner. 2016. Free-Improvised Rehearsal-as-Research for Musical HCI. Proceedings of the CHI Musical HCI Workshop. http://doi.org/10.5281/zenodo.56378
DOI URL BibTex
@inproceedings{Martin:2016ab,
  author = {Martin, Charles and Gardner, Henry},
  title = {Free-Improvised Rehearsal-as-Research for Musical {HCI}},
  booktitle = {Proceedings of the {CHI} Musical {HCI} Workshop},
  conference-url = {http://mcl.open.ac.uk/music-chi/},
  month = may,
  year = {2016},
  doi = {10.5281/zenodo.56378},
  url = {https://metatonetransfer.com/preprints/2016-CHI-MusicHCI-RehearsalAsResearch.pdf},
  keywords = {conference-paper, workshop-paper, refereed},
  date-modified = {2018-02-21 21:12:52 +0000},
  bdsk-url-1 = {https://dx.doi.org/10.5281/zenodo.56378}
}
-
Charles Martin and Henry Gardner. 2016. A Percussion-Focussed Approach to Preserving Touch-Screen Improvisation. In Curating the Digital: Spaces for Art and Interaction, David England, Thecla Schiphorst and Nick Bryan-Kinns (eds.). Springer International Publishing, Switzerland, 51–72. http://doi.org/10.1007/978-3-319-28722-5_5
DOI URL Preprint PDF BibTex
@incollection{Martin:2016rm,
  author = {Martin, Charles and Gardner, Henry},
  title = {A Percussion-Focussed Approach to Preserving Touch-Screen Improvisation},
  booktitle = {Curating the Digital: Spaces for Art and Interaction},
  editor = {England, David and Schiphorst, Thecla and Bryan-Kinns, Nick},
  series = {Springer Series on Cultural Computing},
  publisher = {Springer International Publishing},
  address = {Switzerland},
  month = jul,
  year = {2016},
  pages = {51--72},
  doi = {10.1007/978-3-319-28722-5_5},
  url = {http://hdl.handle.net/1885/316737},
  preprint = {http://hdl.handle.net/1885/316737},
  keywords = {book-chapter, refereed},
  date-modified = {2024-04-17 09:51:11 +0900},
  bdsk-url-1 = {https://dx.doi.org/10.1007/978-3-319-28722-5_5}
}
-
Charles Martin, Henry Gardner, Ben Swift, and Michael Martin. 2016. Intelligent Agents and Networked Buttons Improve Free-Improvised Ensemble Music-Making on Touch-Screens. Proceedings of the SIGCHI Conference on Human Factors in Computing Systems, ACM, pp. 2295–2306. http://doi.org/10.1145/2858036.2858269
DOI Preprint PDF Video BibTex
@inproceedings{Martin:2016vn,
  author = {Martin, Charles and Gardner, Henry and Swift, Ben and Martin, Michael},
  title = {Intelligent Agents and Networked Buttons Improve Free-Improvised Ensemble Music-Making on Touch-Screens},
  booktitle = {Proceedings of the {SIGCHI} Conference on Human Factors in Computing Systems},
  series = {CHI '16},
  publisher = {ACM},
  address = {New York, NY, USA},
  month = may,
  year = {2016},
  pages = {2295--2306},
  doi = {10.1145/2858036.2858269},
  preprint = {https://metatonetransfer.com/preprints/2016-CHI-intelligent-agents-networked-buttons.pdf},
  video = {https://youtu.be/lYz1DOfPmnY},
  keywords = {conference-paper, refereed},
  date-modified = {2018-02-22 20:40:57 +0000},
  bdsk-url-1 = {https://dx.doi.org/10.1145/2858036.2858269}
}
2015
-
Charles Martin. 2015. Decoding Performance with Data. Musicological Society of Australia National Conference, Sydney Conservatorium of Music.
URL Preprint PDF BibTex
@inproceedings{Martin:2015aa,
  author = {Martin, Charles},
  title = {Decoding Performance with Data},
  booktitle = {{Musicological Society of Australia National Conference}},
  publisher = {Sydney Conservatorium of Music},
  address = {Sydney, Australia},
  month = sep,
  year = {2015},
  preprint = {https://metatonetransfer.com/preprints/2015-MSA-DecodingPerformanceWithData.pdf},
  url = {https://metatonetransfer.com/preprints/2015-MSA-DecodingPerformanceWithData.pdf},
  keywords = {conference-presentation, non-refereed},
  date-added = {2018-02-18 20:57:40 +0000},
  date-modified = {2019-01-08 16:43:42 +0100},
  bdsk-url-1 = {https://metatonetransfer.com/preprints/2015-MSA-DecodingPerformanceWithData.pdf}
}
-
Charles Martin, Henry Gardner, Ben Swift, and Michael Martin. 2015. Music of 18 Performances: Evaluating Apps and Agents with Free Improvisation. Proceedings of the 2015 Conference of the Australasian Computer Music Association, Australasian Computer Music Association, pp. 85–94.
Abstract URL Preprint PDF BibTexWe present a study where a small group of experienced iPad musicians evaluated a system of three musical touch-screen apps and two server-based agents over 18 controlled improvisations. The performers’ perspectives were recorded through surveys, interviews, and interaction data. Our agent classifies the touch gestures of the performers and identifies new sections in the improvisations while a control agent returns similar messages sourced from a statistical model. The three touch-screen apps respond according to design paradigms of reward, support, and disruption. In this study of an ongoing musical practice, significant effects were observed due to the apps’ interfaces and how they respond to agent interactions. The “reward” app received the highest ratings. The results were used to iterate the app designs for later performances.
@inproceedings{Martin:2015cr,
  author = {Martin, Charles and Gardner, Henry and Swift, Ben and Martin, Michael},
  title = {Music of 18 Performances: Evaluating Apps and Agents with Free Improvisation},
  booktitle = {Proceedings of the 2015 Conference of the {Australasian Computer Music Association}},
  editor = {Drummond, Jon and Hewitt, Donna and Lerner, Sophea and Stevenson, Ian},
  series = {ACMC2015 - MAKE!},
  publisher = {Australasian Computer Music Association},
  month = nov,
  year = {2015},
  pages = {85--94},
  numpages = {10},
  preprint = {https://metatonetransfer.com/preprints/2015-ACMC-Music18Performances.pdf},
  url = {http://hdl.handle.net/1885/95205},
  keywords = {conference-paper, refereed},
  date-modified = {2018-02-18 20:47:58 +0000},
  bdsk-url-1 = {http://hdl.handle.net/1885/95205}
}
-
Charles Martin, Henry Gardner, and Ben Swift. 2015. Tracking Ensemble Performance on Touch-Screens with Gesture Classification and Transition Matrices. Proceedings of the International Conference on New Interfaces for Musical Expression, Louisiana State University, pp. 359–364. http://doi.org/10.5281/zenodo.1179130
Abstract DOI URL arXiv Preprint PDF BibTexWe present and evaluate a novel interface for tracking ensemble performances on touch-screens. The system uses a Random Forest classifier to extract touch-screen gestures and transition matrix statistics. It analyses the resulting gesture-state sequences across an ensemble of performers. A series of specially designed iPad apps respond to this real-time analysis of free-form gestural performances with calculated modifications to their musical interfaces. We describe our system and evaluate it through cross-validation and profiling as well as concert experience.
@inproceedings{Martin:2015jk,
  author = {Martin, Charles and Gardner, Henry and Swift, Ben},
  title = {Tracking Ensemble Performance on Touch-Screens with Gesture Classification and Transition Matrices},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  editor = {Berdahl, Edgar and Allison, Jesse},
  publisher = {Louisiana State University},
  address = {Baton Rouge, Louisiana, USA},
  month = may,
  year = {2015},
  pages = {359--364},
  doi = {10.5281/zenodo.1179130},
  url = {http://www.nime.org/proceedings/2015/nime2015_242.pdf},
  archiveprefix = {arXiv},
  eprint = {2012.00296},
  preprint = {https://metatonetransfer.com/preprints/2015-NIME-TrackingEnsemblePerformance.pdf},
  keywords = {conference-paper, refereed},
  date-modified = {2020-12-04 14:19:25 +1100},
  bdsk-url-1 = {http://www.nime.org/proceedings/2015/nime2015_242.pdf}
}
-
Charles Martin and Henry Gardner. 2015. That Syncing Feeling: Networked Strategies for Enabling Ensemble Creativity in iPad Musicians. Proceedings of CreateWorld, Griffith University.
Abstract URL Preprint PDF BibTexThe group experience of synchronisation is a key aspect of ensemble musical performance. This paper presents a number of strategies for syncing performance information across networked iPad-instruments to enable creativity among an ensemble of improvising musicians. Acoustic instrumentalists sync without mechanical intervention. Electronic instruments frequently synchronise rhythm using MIDI or OSC connections. In contrast, our system syncs other aspects of performance, such as tonality, instrument functions, and gesture classifications, to support and enhance improvised performance. Over a number of performances with an iPad and percussion group, Ensemble Metatone, various syncing scenarios have been explored that support, extend, and disrupt ensemble creativity.
@inproceedings{Martin:2015mz,
  author = {Martin, Charles and Gardner, Henry},
  title = {That Syncing Feeling: Networked Strategies for Enabling Ensemble Creativity in i{P}ad Musicians},
  booktitle = {Proceedings of {C}reate{W}orld},
  publisher = {Griffith University},
  address = {Brisbane, Australia},
  month = feb,
  year = {2015},
  preprint = {https://metatonetransfer.com/preprints/2015-CreateWorld-SynchingFeeling.pdf},
  url = {http://hdl.handle.net/1885/95216},
  keywords = {conference-paper, refereed},
  date-modified = {2019-01-08 16:40:04 +0100},
  bdsk-url-1 = {http://hdl.handle.net/1885/95216}
}
2014
-
Charles Martin. 2014. iPads in Percussion Ensemble. Percussive Arts Society International Convention.
URL Video BibTex
@inproceedings{Martin:2014ad,
  author = {Martin, Charles},
  title = {{iPads} in Percussion Ensemble},
  booktitle = {{Percussive Arts Society International Convention}},
  series = {PASIC '14},
  address = {Indianapolis, USA},
  month = nov,
  year = {2014},
  url = {https://youtu.be/oyW3mFyoz-I},
  video = {https://youtu.be/oyW3mFyoz-I},
  keywords = {conference-presentation, non-refereed},
  date-added = {2018-02-18 22:11:03 +0000},
  date-modified = {2019-01-08 16:43:26 +0100},
  bdsk-url-1 = {https://youtu.be/oyW3mFyoz-I}
}
-
Charles Martin, Christina Hopgood, Jonathan Griffiths, and Yvonne Lam. 2014. Colour Music Concert with Ensemble Metatone. Colour Music Concert Series, ANU Drill Hall Gallery.
URL Video BibTex
@inproceedings{Martin:2014ab,
  author = {Martin, Charles and Hopgood, Christina and Griffiths, Jonathan and Lam, Yvonne},
  title = {{Colour Music Concert with Ensemble Metatone}},
  booktitle = {{Colour Music Concert Series}},
  publisher = {ANU Drill Hall Gallery},
  address = {Canberra, Australia},
  month = aug,
  year = {2014},
  note = {Musical Performance for iPad Ensemble},
  url = {https://youtu.be/ICeHWlNRsgU},
  video = {https://youtu.be/ICeHWlNRsgU},
  keywords = {artistic-performance, non-refereed},
  date-added = {2018-02-18 21:58:20 +0000},
  date-modified = {2018-02-21 21:16:17 +0000},
  bdsk-url-1 = {https://youtu.be/ICeHWlNRsgU}
}
-
Charles Martin and Henry Gardner. 2014. Preserving Musical Performance on Touch-Screens. Proceedings of the CHI 2014 Workshop on Curating the Digital: Spaces for Art and Interaction. http://doi.org/10.5281/zenodo.1175599
DOI URL Preprint PDF BibTex
@inproceedings{Martin:2014aa,
  author = {Martin, Charles and Gardner, Henry},
  title = {Preserving Musical Performance on Touch-Screens},
  booktitle = {Proceedings of the {CHI} 2014 Workshop on Curating the Digital: Spaces for Art and Interaction},
  address = {Toronto, Canada},
  month = apr,
  year = {2014},
  doi = {10.5281/zenodo.1175599},
  preprint = {https://metatonetransfer.com/preprints/2014-CHI-CuratingDigital-PreservingMusicalPerformance.pdf},
  url = {https://metatonetransfer.com/preprints/2014-CHI-CuratingDigital-PreservingMusicalPerformance.pdf},
  keywords = {conference-paper, workshop-paper, refereed},
  date-added = {2018-02-18 21:01:46 +0000},
  date-modified = {2019-01-08 16:41:52 +0100},
  bdsk-url-1 = {https://metatonetransfer.com/preprints/2014-CHI-CuratingDigital-PreservingMusicalPerformance.pdf}
}
-
Charles Martin, Henry Gardner, and Ben Swift. 2014. Exploring Percussive Gesture on iPads with Ensemble Metatone. Proceedings of the SIGCHI Conference on Human Factors in Computing Systems, ACM, pp. 1025–1028. http://doi.org/10.1145/2556288.2557226
DOI URL Preprint PDF Video BibTex
@inproceedings{Martin:2014cr,
  author = {Martin, Charles and Gardner, Henry and Swift, Ben},
  title = {Exploring Percussive Gesture on i{P}ads with {E}nsemble {M}etatone},
  booktitle = {Proceedings of the {SIGCHI} Conference on Human Factors in Computing Systems},
  series = {{CHI} '14},
  publisher = {{ACM}},
  address = {New York, NY, USA},
  location = {Toronto, Ontario, Canada},
  month = apr,
  year = {2014},
  pages = {1025--1028},
  numpages = {4},
  acmid = {2557226},
  isbn = {978-1-4503-2473-1},
  doi = {10.1145/2556288.2557226},
  url = {http://doi.acm.org/10.1145/2556288.2557226},
  preprint = {https://metatonetransfer.com/preprints/2014-CHI-ExploringPercussiveGesture.pdf},
  video = {https://youtu.be/NqdxCteYRFk},
  keywords = {conference-paper, refereed},
  date-modified = {2018-02-22 20:42:50 +0000},
  bdsk-url-1 = {http://doi.acm.org/10.1145/2556288.2557226},
  bdsk-url-2 = {https://dx.doi.org/10.1145/2556288.2557226}
}
-
Charles Martin. 2014. Making Improvised Music for iPad and Percussion with Ensemble Metatone. Proceedings of the Australasian Computer Music Conference, pp. 115–118.
URL Preprint PDF BibTex
@inproceedings{Martin:2014jk,
  author = {Martin, Charles},
  title = {Making Improvised Music for i{P}ad and Percussion with {E}nsemble {M}etatone},
  booktitle = {Proceedings of the {Australasian} Computer Music Conference},
  month = jul,
  year = {2014},
  pages = {115--118},
  preprint = {https://metatonetransfer.com/preprints/2014-ACMC-MakingImprovisedMusic.pdf},
  url = {http://hdl.handle.net/1885/95314},
  keywords = {conference-paper, non-refereed, artist-talk},
  date-modified = {2018-02-21 21:30:45 +0000},
  bdsk-url-1 = {http://hdl.handle.net/1885/95314}
}
-
Charles Martin, Henry Gardner, and Ben Swift. 2014. MetaTravels and MetaLonsdale: iPad Apps for Percussive Improvisation. CHI ’14 Extended Abstracts on Human Factors in Computing Systems, ACM, pp. 547–550. http://doi.org/10.1145/2559206.2574805
DOI Preprint PDF BibTex
@inproceedings{Martin:2014xp,
  author = {Martin, Charles and Gardner, Henry and Swift, Ben},
  title = {{M}eta{T}ravels and {M}eta{L}onsdale: {iPad} Apps for Percussive Improvisation},
  booktitle = {{CHI} '14 Extended Abstracts on Human Factors in Computing Systems},
  series = {{CHI} {EA} '14},
  publisher = {ACM},
  address = {New York, NY, USA},
  location = {Toronto, Ontario, Canada},
  month = apr,
  year = {2014},
  pages = {547--550},
  numpages = {4},
  acmid = {2574805},
  isbn = {978-1-4503-2474-8},
  doi = {10.1145/2559206.2574805},
  preprint = {https://metatonetransfer.com/preprints/2014-CHI-MetaTravelsMetaLonsdale.pdf},
  keywords = {conference-paper, refereed, demonstration},
  date-modified = {2018-02-21 21:10:06 +0000},
  bdsk-url-1 = {https://dx.doi.org/10.1145/2559206.2574805}
}
2013
-
Maria Finkelmeier, Charles Martin, and Jacob Remington. 2013. Ensemble Evolution: Showcase Concert. Percussive Arts Society International Convention.
URL Video BibTex
@inproceedings{Finkelmeier:2013aa,
  author = {Finkelmeier, Maria and Martin, Charles and Remington, Jacob},
  title = {{Ensemble Evolution}: Showcase Concert},
  booktitle = {Percussive Arts Society International Convention},
  series = {PASIC '13},
  month = nov,
  year = {2013},
  url = {https://youtu.be/zqnffMAHbPA},
  video = {https://youtu.be/zqnffMAHbPA},
  keywords = {artistic-performance, non-refereed},
  date-added = {2018-02-18 22:09:03 +0000},
  date-modified = {2019-01-08 16:44:00 +0100},
  bdsk-url-1 = {https://youtu.be/zqnffMAHbPA}
}
-
Charles Martin. 2013. Nordlig Vinter for Vibraphone and iOS Devices. Musical Program of the International Conference on New Interfaces for Musical Expression.
Abstract Video BibTexNordlig Vinter is a flexible length work for vibraphone and iOS devices. The work recalls my first experiences of the icy winter in Piteå, Northern Sweden and was composed while in residency at the School of Music there. The work includes sections of composed material in a minimal jazz style where themes on the vibraphone are presented in the context of computer-generated sounds constructed from field recordings taken in Piteå. These sections are contrasted by elements of free improvisation where the vibraphone part goes into musical dialogue with the computer elements that effect and react to the vibraphone sound.
@inproceedings{Martin:2013aa,
  author = {Martin, Charles},
  title = {Nordlig Vinter for Vibraphone and iOS Devices},
  booktitle = {Musical Program of the International Conference on New Interfaces for Musical Expression},
  series = {NIME '13},
  address = {Daejeon, Republic of Korea},
  month = may,
  year = {2013},
  video = {https://youtu.be/qjCkrCMpCtU},
  keywords = {artistic-performance, refereed},
  date-added = {2018-02-18 20:00:22 +0000},
  date-modified = {2018-02-22 16:02:21 +0000}
}
-
Charles Martin. 2013. Performing with a Mobile Computer System for Vibraphone. Proceedings of the International Conference on New Interfaces for Musical Expression, Graduate School of Culture Technology, KAIST, pp. 377–380. http://doi.org/10.5281/zenodo.1178602
Abstract DOI URL arXiv Preprint PDF BibTexThis paper describes the development of an Apple iPhone based mobile computer system for vibraphone and its use in a series of the author’s performance projects in 2011 and 2012. This artistic research was motivated by a desire to develop an alternative to laptop computers for the author’s existing percussion and computer performance practice. The aims were to develop a light, compact and flexible system using mobile devices that would allow computer music to infiltrate solo and ensemble performance situations where it is difficult to use a laptop computer. The project began with a system that brought computer elements to Nordlig Vinter, a suite of percussion duos, using an iPhone, RjDj, Pure Data and a home-made pickup system. This process was documented with video recordings and analysed using ethnographic methods. The mobile computer music setup proved to be elegant and convenient in performance situations with very little time and space to set up, as well as in performance classes and workshops. The simple mobile system encouraged experimentation and the platforms used enabled sharing with a wider audience.
@inproceedings{Martin:2013,
  author = {Martin, Charles},
  title = {Performing with a Mobile Computer System for Vibraphone},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  editor = {Yeo, W. and Lee, K. and Sigman, A. and Ji, H. and Wakefield, G.},
  publisher = {Graduate School of Culture Technology, KAIST},
  address = {Daejeon, Republic of Korea},
  month = may,
  year = {2013},
  pages = {377--380},
  doi = {10.5281/zenodo.1178602},
  url = {http://nime.org/proceedings/2013/nime2013_121.pdf},
  archiveprefix = {arXiv},
  eprint = {2012.00265},
  preprint = {https://metatonetransfer.com/preprints/2013-NIME-MobileComputerSystem.pdf},
  keywords = {conference-paper, refereed},
  date-modified = {2020-12-04 14:18:05 +1100},
  bdsk-url-1 = {http://nime.org/proceedings/2013/nime2013_121.pdf}
}
-
Charles Martin. 2013. Integrating Mobile Music with Percussion Performance Practice. Proceedings of the International Computer Music Conference, pp. 437–440.
Abstract URL Preprint PDF BibTexThis paper describes a series of musical works designed to integrate mobile computer instruments into a percussion ensemble performance practice. The works were motivated by the author’s desire to introduce computer music elements to non-programmer members of the percussion group Ensemble Evolution. Each of the works used simple setups with Apple iOS devices in order to facilitate rehearsals and increase the performers’ engagement with the computer elements of the works. This artistic research considers the performance practices that are enabled and demanded when complementing acoustic percussion instruments with mobile music devices. The first two works, 3p3p and Nordlig Vinter used computer music elements composed in Pure Data running on iPhones using RjDj in the context of semi-composed works for percussion. The third work, Snow Music was a collaboratively developed improvised work using percussion and a native iOS app developed with libpd. An ethnographic analysis of the preparation and performance of the three works shows a development in the role of mobile devices in the performances and the emergence of performance practices using both natural interactions and control of generative processes.
@inproceedings{Martin:2013fk,
  author = {Martin, Charles},
  title = {Integrating Mobile Music with Percussion Performance Practice},
  booktitle = {Proceedings of the International Computer Music Conference},
  address = {Perth, Australia},
  month = aug,
  year = {2013},
  pages = {437--440},
  preprint = {https://metatonetransfer.com/preprints/2013-ICMC-IntegratingMobileMusic.pdf},
  url = {http://hdl.handle.net/2027/spo.bbp2372.2013.073},
  keywords = {conference-paper, refereed},
  date-modified = {2018-02-21 16:16:58 +0000},
  bdsk-url-1 = {http://hdl.handle.net/2027/spo.bbp2372.2013.073}
}
2012
-
Charles Martin. 2012. Creating Mobile Computer Music for Percussionists: Snow Music. Proceedings of the Australasian Computer Music Conference, Australasian Computer Music Association. http://doi.org/10.13140/RG.2.1.5150.5687
DOI Preprint PDF BibTex
@inproceedings{Martin:2012fk,
  author = {Martin, Charles},
  title = {Creating Mobile Computer Music for Percussionists: Snow Music},
  booktitle = {Proceedings of the {Australasian} Computer Music Conference},
  editor = {Hitchcock, Matt and Taylor, Jodie},
  publisher = {Australasian Computer Music Association},
  address = {The Basin, Australia},
  month = jul,
  year = {2012},
  doi = {10.13140/RG.2.1.5150.5687},
  preprint = {https://metatonetransfer.com/preprints/2012-ACMC-SnowMusic.pdf},
  keywords = {conference-paper, non-refereed, artist-talk},
  date-modified = {2018-02-21 21:30:57 +0000},
  bdsk-url-1 = {https://dx.doi.org/10.13140/RG.2.1.5150.5687}
}
2011
-
Charles Martin and Chi-Hsia Lai. 2011. Strike on Stage: a Percussion and Media Performance. Proceedings of the International Conference on New Interfaces for Musical Expression, pp. 142–143. http://doi.org/10.5281/zenodo.1178103
Abstract DOI URL arXiv Preprint PDF Video BibTexThis paper describes Strike on Stage, an interface and corresponding audio-visual performance work developed and performed in 2010 by percussionists and media artists Chi-Hsia Lai and Charles Martin. The concept of Strike on Stage is to integrate computer visuals and sound into an improvised percussion performance. A large projection surface is positioned directly behind the performers, while a computer vision system tracks their movements. The setup allows computer visualisation and sonification to be directly responsive and unified with the performers’ gestures.
@inproceedings{Martin:2011oz,
  author = {Martin, Charles and Lai, Chi-Hsia},
  title = {{Strike on Stage}: a Percussion and Media Performance},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  editor = {Jensenius, Alexander R. and Tveit, Anders and Godoy, Rolf I. and Overholt, Dan},
  address = {Oslo, Norway},
  month = may,
  year = {2011},
  pages = {142--143},
  doi = {10.5281/zenodo.1178103},
  url = {http://www.nime.org/proceedings/2011/nime2011_142.pdf},
  archiveprefix = {arXiv},
  eprint = {2012.00250},
  preprint = {https://metatonetransfer.com/preprints/2011-NIME-StrikeOnStage.pdf},
  video = {https://youtu.be/2TnOI8Ac3PY},
  keywords = {conference-paper, refereed},
  date-modified = {2020-12-04 14:17:26 +1100},
  bdsk-url-1 = {http://www.nime.org/proceedings/2011/nime2011_142.pdf}
}
2010
-
Chi-Hsia Lai and Charles Martin. 2010. Strike on Stage 1.2 for percussion and interactive media. Musical Program of the Australasian Computer Music Conference.
URL Video BibTex
@inproceedings{Lai:2010ab,
  author = {Lai, Chi-Hsia and Martin, Charles},
  title = {{Strike on Stage} 1.2 for percussion and interactive media},
  booktitle = {Musical Program of the {Australasian} Computer Music Conference},
  editor = {Opie, Timothy},
  month = jun,
  year = {2010},
  url = {https://vimeo.com/13543935},
  video = {https://vimeo.com/13543935},
  keywords = {artistic-performance, conference, non-refereed},
  date-added = {2018-02-18 20:11:54 +0000},
  date-modified = {2018-02-21 21:08:11 +0000},
  bdsk-url-1 = {https://vimeo.com/13543935}
}
-
Chi-Hsia Lai and Charles Martin. 2010. Strike on Stage for percussion and interactive media. Musical Program of the International Conference on New Interfaces for Musical Expression.
URL Video BibTex
@inproceedings{Lai:2010aa,
  author = {Lai, Chi-Hsia and Martin, Charles},
  title = {{Strike on Stage} for percussion and interactive media},
  booktitle = {Musical Program of the International Conference on New Interfaces for Musical Expression},
  month = jun,
  year = {2010},
  url = {http://www.nime.org/2010/program.html#Performances},
  video = {https://vimeo.com/14253118},
  keywords = {artistic-performance, conference, non-refereed},
  date-added = {2018-02-18 20:04:42 +0000},
  date-modified = {2018-02-22 16:00:30 +0000},
  bdsk-url-1 = {http://www.nime.org/2010/program.html#Performances}
}
-
Charles Martin, Benjamin Forster, and Hanna Cormick. 2010. Cross-Artform Performance Using Networked Interfaces: Last Man to Die’s Vital LMTD. Proceedings of the International Conference on New Interfaces for Musical Expression, pp. 204–207. http://doi.org/10.5281/zenodo.1177843
Abstract DOI URL arXiv Preprint PDF Video BibTexIn 2009 the cross artform group, Last Man to Die, presented a series of performances using new interfaces and networked performance to integrate the three artforms of its members (actor, Hanna Cormick, visual artist, Benjamin Forster and percussionist, Charles Martin). This paper explains our artistic motivations and design for a computer vision surface and networked heartbeat sensor as well as the experience of mounting our first major work, Vital LMTD.
@inproceedings{Martin:2010dk,
  author = {Martin, Charles and Forster, Benjamin and Cormick, Hanna},
  title = {Cross-Artform Performance Using Networked Interfaces: {L}ast {M}an to {D}ie's {V}ital {LMTD}},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  editor = {Beilharz, Kirsty and Bongers, Bert and Johnston, Andrew and Ferguson, Sam},
  address = {Sydney, Australia},
  month = jun,
  year = {2010},
  pages = {204--207},
  doi = {10.5281/zenodo.1177843},
  url = {http://www.nime.org/proceedings/2010/nime2010_204.pdf},
  archiveprefix = {arXiv},
  eprint = {2012.00249},
  preprint = {https://metatonetransfer.com/preprints/2010-NIME-CrossArtforPerformance.pdf},
  video = {https://vimeo.com/46359018},
  keywords = {conference-paper, refereed},
  date-modified = {2020-12-04 14:16:27 +1100},
  bdsk-url-1 = {http://www.nime.org/proceedings/2010/nime2010_204.pdf}
}
-
Charles Martin, Benjamin Forster, and Hanna Cormick. 2010. Audience Interactive Performance in “The Last Man to Die”. Proceedings of the Australasian Computer Music Conference, Australasian Computer Music Association, pp. 89–91.
URL Preprint PDF BibTex
@inproceedings{Martin:2010rw,
  author = {Martin, Charles and Forster, Benjamin and Cormick, Hanna},
  title = {Audience Interactive Performance in ``{The Last Man to Die}''},
  booktitle = {Proceedings of the {Australasian} Computer Music Conference},
  editor = {Opie, Timothy},
  publisher = {Australasian Computer Music Association},
  address = {The Basin, Australia},
  month = jun,
  year = {2010},
  pages = {89--91},
  preprint = {https://metatonetransfer.com/preprints/2010-ACMC-AudienceInteractivePerformance.pdf},
  url = {http://hdl.handle.net/1885/101945},
  keywords = {conference-paper, non-refereed, artist-talk},
  date-modified = {2018-02-21 21:30:35 +0000},
  bdsk-url-1 = {http://hdl.handle.net/1885/101945}
}